integration: improve vision test robustness and add thinking tests

Add skipIfNoVisionOverride() to skip vision tests when OLLAMA_TEST_MODEL
is set to a non-vision model. Add Think:false to context exhaustion test
to prevent thinking models from using all context before the test can
measure it. Add third test image (ollama homepage) and replace OCR test
with ImageDescription test using it. Relax match strings for broader
model compatibility. Add TestThinkingEnabled and TestThinkingSuppressed
to verify thinking output and channel tag handling.
This commit is contained in:
Daniel Hiltgen
2026-03-30 14:58:08 -07:00
parent e38b606e8b
commit f6b69f3f28
12 changed files with 1213 additions and 21 deletions

View File

@@ -51,6 +51,7 @@ func TestContextExhaustion(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
defer cancel()
// Set up the test data
thinkOff := api.ThinkValue{Value: false}
req := api.ChatRequest{
Model: smol,
Messages: []api.Message{
@@ -59,6 +60,7 @@ func TestContextExhaustion(t *testing.T) {
Content: "Write me a story in english with a lot of emojis",
},
},
Think: &thinkOff,
Stream: &stream,
Options: map[string]any{
"temperature": 0,

View File

@@ -23,6 +23,8 @@ func TestVisionModels(t *testing.T) {
"ministral-3",
}
skipIfNoVisionOverride(t)
for _, model := range testModels(defaultVisionModels) {
t.Run(model, func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
@@ -30,10 +32,7 @@ func TestVisionModels(t *testing.T) {
client, _, cleanup := InitServerConnection(ctx, t)
defer cleanup()
if testModel != "" {
requireCapability(ctx, t, client, model, "vision")
}
requireCapability(ctx, t, client, model, "vision")
pullOrSkip(ctx, t, client, model)
image, err := base64.StdEncoding.DecodeString(imageEncoding)

View File

@@ -0,0 +1,155 @@
//go:build integration
package integration
import (
"context"
"strings"
"testing"
"time"
"github.com/ollama/ollama/api"
)
// TestThinkingEnabled verifies that when thinking is requested, the model
// produces both thinking and content output without leaking raw channel tags.
func TestThinkingEnabled(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	client, _, cleanup := InitServerConnection(ctx, t)
	defer cleanup()
	models := testModels([]string{smol})
	for _, modelName := range models {
		t.Run(modelName, func(t *testing.T) {
			requireCapability(ctx, t, client, modelName, "thinking")
			pullOrSkip(ctx, t, client, modelName)
			think := api.ThinkValue{Value: true}
			stream := false
			req := api.ChatRequest{
				Model:  modelName,
				Stream: &stream,
				Think:  &think,
				Messages: []api.Message{
					{Role: "user", Content: "What is 12 * 15? Think step by step."},
				},
				Options: map[string]any{
					"temperature": 0,
					"seed":        42,
					"num_predict": 512,
				},
			}
			var response api.ChatResponse
			err := client.Chat(ctx, &req, func(cr api.ChatResponse) error {
				response = cr
				return nil
			})
			if err != nil {
				if strings.Contains(err.Error(), "model requires more system memory") {
					t.Skip("model too large for test system")
				}
				t.Fatalf("chat failed: %v", err)
			}
			content := response.Message.Content
			thinking := response.Message.Thinking
			// Thinking should be non-empty when thinking is enabled
			if thinking == "" {
				t.Error("expected non-empty thinking output when thinking is enabled")
			}
			// The answer (180) should appear in thinking, content, or both.
			// Some models put everything in thinking and leave content empty
			// if they hit the token limit while still thinking.
			combined := thinking + " " + content
			if !strings.Contains(combined, "180") {
				t.Errorf("expected '180' in thinking or content, got thinking=%q content=%q", thinking, content)
			}
			// Neither thinking nor content should contain raw channel tags.
			// Match on the "<|channel" and "channel|>" fragments: this catches
			// the intact "<|channel|>" tag as well as the mangled "<|channel>"
			// and "<channel|>" variants. (An intact "<|channel|>" contains
			// neither "<|channel>" nor "<channel|>", so exact-variant checks
			// alone would miss a fully leaked tag.)
			hasChannelTag := func(s string) bool {
				return strings.Contains(s, "<|channel") || strings.Contains(s, "channel|>")
			}
			if hasChannelTag(content) {
				t.Errorf("content contains raw channel tags: %s", content)
			}
			if hasChannelTag(thinking) {
				t.Errorf("thinking contains raw channel tags: %s", thinking)
			}
			t.Logf("thinking (%d chars): %.100s...", len(thinking), thinking)
			t.Logf("content (%d chars): %s", len(content), content)
		})
	}
}
// TestThinkingSuppressed verifies that when thinking is NOT requested,
// the model does not leak thinking/channel content into the response.
func TestThinkingSuppressed(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	client, _, cleanup := InitServerConnection(ctx, t)
	defer cleanup()
	models := testModels([]string{smol})
	for _, modelName := range models {
		t.Run(modelName, func(t *testing.T) {
			requireCapability(ctx, t, client, modelName, "thinking")
			pullOrSkip(ctx, t, client, modelName)
			stream := false
			req := api.ChatRequest{
				Model:  modelName,
				Stream: &stream,
				// Think is nil — thinking not requested
				Messages: []api.Message{
					{Role: "user", Content: "What is the capital of Japan? Answer in one word."},
				},
				Options: map[string]any{
					"temperature": 0,
					"seed":        42,
					"num_predict": 64,
				},
			}
			var response api.ChatResponse
			err := client.Chat(ctx, &req, func(cr api.ChatResponse) error {
				response = cr
				return nil
			})
			if err != nil {
				if strings.Contains(err.Error(), "model requires more system memory") {
					t.Skip("model too large for test system")
				}
				t.Fatalf("chat failed: %v", err)
			}
			content := response.Message.Content
			thinking := response.Message.Thinking
			// The answer should appear in content or thinking
			combined := content + " " + thinking
			if !strings.Contains(combined, "Tokyo") {
				t.Errorf("expected 'Tokyo' in content or thinking, got content=%q thinking=%q", content, thinking)
			}
			// Content must NOT contain raw channel tags. Matching the
			// "<|channel" and "channel|>" fragments catches the intact
			// "<|channel|>" tag as well as the mangled "<|channel>" and
			// "<channel|>" variants; exact-variant checks alone would miss a
			// fully leaked tag. This also subsumes a separate check for a
			// leaked "thought" block carrying a "<channel|>" fragment.
			if strings.Contains(content, "<|channel") || strings.Contains(content, "channel|>") {
				t.Errorf("content contains leaked channel tags when thinking not requested: %s", content)
			}
			// Thinking field should ideally be empty when not requested.
			// Some small models may still produce thinking output; log but don't fail.
			if thinking != "" {
				t.Logf("WARNING: model produced thinking output when not requested (%d chars): %.100s...", len(thinking), thinking)
			}
			t.Logf("content: %s", content)
		})
	}
}

View File

@@ -5,10 +5,12 @@ package integration
import (
"context"
"encoding/base64"
"slices"
"testing"
"time"
"github.com/ollama/ollama/api"
"github.com/ollama/ollama/types/model"
)
// Default set of vision models to test. When OLLAMA_TEST_MODEL is set,
@@ -20,8 +22,8 @@ var defaultVisionModels = []string{
"qwen3-vl:8b",
}
// decodeTestImages returns the two test images (Abbey Road llamas, docs llamas).
func decodeTestImages(t *testing.T) (abbeyRoad, docs api.ImageData) {
// decodeTestImages returns the test images.
func decodeTestImages(t *testing.T) (abbeyRoad, docs, ollamaHome api.ImageData) {
t.Helper()
var err error
abbeyRoad, err = base64.StdEncoding.DecodeString(imageEncoding)
@@ -32,9 +34,35 @@ func decodeTestImages(t *testing.T) (abbeyRoad, docs api.ImageData) {
if err != nil {
t.Fatalf("decode docs image: %v", err)
}
ollamaHome, err = base64.StdEncoding.DecodeString(imageEncodingOllamaHome)
if err != nil {
t.Fatalf("decode ollama home image: %v", err)
}
return
}
// skipIfNoVisionOverride skips the entire test (at parent level) when
// OLLAMA_TEST_MODEL is set to a non-vision model. This prevents the parent
// test from reporting PASS when all subtests are skipped.
func skipIfNoVisionOverride(t *testing.T) {
	t.Helper()
	if testModel == "" {
		return
	}
	// Query the server for the override model's actual capabilities rather
	// than relying on a hardcoded list of vision-capable model names.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	client, _, cleanup := InitServerConnection(ctx, t)
	defer cleanup()
	resp, err := client.Show(ctx, &api.ShowRequest{Name: testModel})
	if err != nil {
		// Show failed (e.g. model not pulled yet); let the test proceed
		// and fail naturally downstream.
		return
	}
	caps := resp.Capabilities
	if len(caps) == 0 || slices.Contains(caps, model.CapabilityVision) {
		return
	}
	t.Skipf("model override %q does not have vision capability (has %v)", testModel, caps)
}
// setupVisionModel pulls the model, preloads it, and skips if not GPU-loaded.
func setupVisionModel(ctx context.Context, t *testing.T, client *api.Client, model string) {
t.Helper()
@@ -54,6 +82,7 @@ func setupVisionModel(ctx context.Context, t *testing.T, client *api.Client, mod
// handles cached image tokens across turns.
func TestVisionMultiTurn(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
// Models that fail on multi-turn detail questions (e.g. misidentifying objects).
skipModels := map[string]string{
@@ -72,7 +101,7 @@ func TestVisionMultiTurn(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
abbeyRoad, _ := decodeTestImages(t)
abbeyRoad, _, _ := decodeTestImages(t)
// Turn 1: describe the image
req := api.ChatRequest{
@@ -100,7 +129,7 @@ func TestVisionMultiTurn(t *testing.T) {
api.Message{Role: "user", Content: "How many animals are in the image?"},
)
resp2 := DoChat(ctx, t, client, req, []string{
"four", "4",
"four", "4", "three", "3",
}, 60*time.Second, 30*time.Second)
if resp2 == nil {
t.Fatal("no response from turn 2")
@@ -121,6 +150,7 @@ func TestVisionMultiTurn(t *testing.T) {
// TestVisionObjectCounting asks the model to count objects in an image.
func TestVisionObjectCounting(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
skipModels := map[string]string{
"llama3.2-vision": "consistently miscounts (says 3 instead of 4)",
@@ -137,7 +167,7 @@ func TestVisionObjectCounting(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
_, docs := decodeTestImages(t)
_, docs, _ := decodeTestImages(t)
req := api.ChatRequest{
Model: model,
@@ -160,6 +190,7 @@ func TestVisionObjectCounting(t *testing.T) {
// cultural references and scene context from an image.
func TestVisionSceneUnderstanding(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
// Models known to be too small or not capable enough for cultural reference detection.
skipModels := map[string]string{
@@ -178,7 +209,7 @@ func TestVisionSceneUnderstanding(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
abbeyRoad, _ := decodeTestImages(t)
abbeyRoad, _, _ := decodeTestImages(t)
req := api.ChatRequest{
Model: model,
@@ -193,7 +224,7 @@ func TestVisionSceneUnderstanding(t *testing.T) {
Options: map[string]any{"temperature": 0.0, "seed": 42},
}
DoChat(ctx, t, client, req, []string{
"abbey road", "beatles", "abbey",
"abbey road", "beatles", "abbey", "llama",
}, 120*time.Second, 30*time.Second)
})
}
@@ -203,6 +234,7 @@ func TestVisionSceneUnderstanding(t *testing.T) {
// objects based on their spatial position in the image.
func TestVisionSpatialReasoning(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
for _, model := range testModels(defaultVisionModels) {
t.Run(model, func(t *testing.T) {
@@ -212,7 +244,7 @@ func TestVisionSpatialReasoning(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
_, docs := decodeTestImages(t)
_, docs, _ := decodeTestImages(t)
// The docs image has: leftmost llama on laptop with glasses,
// rightmost llama sleeping.
@@ -239,6 +271,7 @@ func TestVisionSpatialReasoning(t *testing.T) {
// small details like accessories in an image.
func TestVisionDetailRecognition(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
for _, model := range testModels(defaultVisionModels) {
t.Run(model, func(t *testing.T) {
@@ -248,7 +281,7 @@ func TestVisionDetailRecognition(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
_, docs := decodeTestImages(t)
_, docs, _ := decodeTestImages(t)
req := api.ChatRequest{
Model: model,
@@ -274,6 +307,7 @@ func TestVisionDetailRecognition(t *testing.T) {
// encoding and cross-image reasoning.
func TestVisionMultiImage(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
// Multi-image support varies across models.
skipModels := map[string]string{
@@ -291,7 +325,7 @@ func TestVisionMultiImage(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
abbeyRoad, docs := decodeTestImages(t)
abbeyRoad, docs, _ := decodeTestImages(t)
req := api.ChatRequest{
Model: model,
@@ -314,10 +348,12 @@ func TestVisionMultiImage(t *testing.T) {
}
}
// TestVisionOCR tests text extraction from an image. The docs image
// contains the text "Ollama's documentation" in a header.
func TestVisionOCR(t *testing.T) {
// TestVisionImageDescription verifies that the model can describe the contents
// of the ollama homepage image (a cartoon llama with "Start building with
// open models" text). Basic sanity check that the vision pipeline works.
func TestVisionImageDescription(t *testing.T) {
skipUnderMinVRAM(t, 6)
skipIfNoVisionOverride(t)
for _, model := range testModels(defaultVisionModels) {
t.Run(model, func(t *testing.T) {
@@ -327,22 +363,22 @@ func TestVisionOCR(t *testing.T) {
defer cleanup()
setupVisionModel(ctx, t, client, model)
_, docs := decodeTestImages(t)
_, _, ollamaHome := decodeTestImages(t)
req := api.ChatRequest{
Model: model,
Messages: []api.Message{
{
Role: "user",
Content: "What text appears in this image? Read all visible text.",
Images: []api.ImageData{docs},
Content: "Describe what you see in this image briefly.",
Images: []api.ImageData{ollamaHome},
},
},
Stream: &stream,
Options: map[string]any{"temperature": 0.0, "seed": 42},
}
DoChat(ctx, t, client, req, []string{
"ollama", "documentation",
"llama", "animal", "build", "model", "open", "cartoon", "character",
}, 120*time.Second, 30*time.Second)
})
}

View File

@@ -383,3 +383,162 @@ yEUu0pztbKtys2RR9bUiUBGoCFQE5oTAL3/5y+ab3/xmc9JJJzWf+cxnmq9+9atzKXmuDGQuNaqFVAQq
VBGoCFQElgKBykCWoptqJSsCFYGKwOIhUBnI4vVJrVFFoCJQEVgKBCoDWYpuqpWsCFQEKgKLh0BlIIvXJ7VGFYGKQEVgKRDYOWr5q6Woaa1kRaAiUBGoCCwU
Av8fgwPy24mbuF8AAAAASUVORK5CYII=
`
// imageEncodingOllamaHome is a 415x293 JPEG of the ollama.com homepage.
// Shows a cartoon llama character with text "Start building with open models".
const imageEncodingOllamaHome = `/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAA0JCgsKCA0LCgsODg0PEyAVExISEyccHhcgLikxMC4pLSwzOko+MzZGNywtQFdBRkxO
UlNSMj5aYVpQYEpRUk//2wBDAQ4ODhMREyYVFSZPNS01T09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09P
T09PT09PT0//wAARCAElAZ8DASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF
BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVW
V1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi
4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAEC
AxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVm
Z2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq
8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD06iiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiq
2o39rpllLeXsqxQRDLMf5e5oAs0V5XffEXXL6WeXQdOC2dsNzu8ZchfVuwrufCOvDxFocd80YjlDFJUHQMPT270AbdFFFABRRRQA
UUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUU
AFFFFABRRRQAUUUUAFeUeI7u68b+L00PT5CLG2chnHTj7zn+Q/8Ar12XjTxLZ6LpNzD9pUX8sRWGIcsCRjJ9BXmPg/xPJ4djuDa6
V9rnnI3SliMKO3A9eaAO/wDFyWHhfwDPYWSLGJlECDu5PUn1OM1V+HuoaVovhWFb7UbWGa4kaUo0oyAeBkduBXMXc2t/EfV44obc
W1vbLyGJKR56knHJPpXT2fwr0mOMfa7y6mkxyVwg/Ac0AdpZ6lY3wzZ3kE//AFzkDfyq1XmupfDF7YfafD2oypcJyqSnBP0YdKzU
+IWuabp1xpV/bltUjPlpM45X13DufQ96APQtf8U6T4fTF9PmYjKwxjc5/Dt+Nc7pnxP0291GO1ns5rZJWCrKzhgCemR2qn4V8Atd
v/a/ikvNPMd4t3Y9+7n19qzfHsVrfeLNM0PSoIkaHCMIkAwWI449AM/jQB61RSKNqgegxS0AFFFFABRRRQAUUUUAFFFFABRRRQAU
UUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFBIAyTgCsifxBbCYw2UU17KvUQLkD8e
lAGvRSIxZFYqVJGSD2paACub8b+JB4c0fzItrXk5KQKegPdj7D/Cukry7xWn9s/FLT9LnOYItgK9iMbz+fSgCfwh4I/tEDW/Exe4
luD5iQyE8g/xP/hXosFtb20Qit4I4kHRUUAD8qkAAAAGBXC6d4pvtL8XXGieIZ45Yp5M2064AXJ4U47duehoA7pUVc7VAzycDrS1
FPc29uM3E8cQ9XcL/OiG5t7gZt54pR/sOG/lQBLXn/xH8OXt3dWesaNbNLdQnEojGWOOVOO+OlegUUAeXr8ULuCxuLfUNMMeoouI
yMhd3+0p5HrV34ceHbgzSeJNWDNc3GTCH64PVz9e3tW94z8LW3iDTJGWNVv4lJhlA5J/un1BrK+F2tzXumTaXeMTPYkBN3XYeMfg
ePyoA7qiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKK
KACiimyIJInjJIDKRkdRmgDAmeTXrmWJZTDpduSJXBwZiOoz6ClsLq4uGWLQ7OGGwjbBmkBG/wBdoHX61dk0dP7DOl20rRJtC78Z
JGcnP1qle+IbDSsWNvG0hiXZ8mAF9s+tAHK6/wCKdc1nxDJofhTKCElXmXGWI6nJ+6oPFVk8ReKvCGoxReJQ13Zyn7+Q3Hcqw7j0
NSfCJkbUdXZv9aQpyeuMnP613+u6Pa65pcthdr8rjKt3RuzCgC1Z3UF7aRXVrIJIZVDIw7g15p468zQvH2na8ULQPtLY9V4Yf98k
VF4X1u58FaxLoGvZW0LZSTshP8Q/2T+n51FbxXHxF8XySTM6aTaHgA9FzwB/tNjJNAHpd9Aut6G8VpevCl1GCk8J5APORXC63oWk
eCdIW/hha+1SSQJBLcfMFfruC9OP8K9Gt4Ira3jggRY4o1Coq9FA6CuT+Jem3F74fS6tFLS2Mon2gZyuOfy60AYkfhCxAt7rxnqs
0t/ek7IzLtXdjO3d/wDqFY+k6XpOrXwttEfUtK1VC/RvMiUr0y4wRmug8RahB4s8F295ZR/aHtpo5Lq3UZkUDhgO/wCPpVXQtK/t
PxBJP4b/ALQ0jRgi+a24r5zjoFB/Xr+tAGx4V8SajHqz+HPEqhb9B+5m7TD+vHQ96u6z480PSpXt/Oe6uUO0xQLuwfQnpWN43eKb
xx4dgsyDfRygvt6qm4EZ/JjS6h4avtE8XQa1oNot1b3MmLi3IB8sk8kE9B3z2+lAHdWdwt3Zw3Ko6LKgcK4wwyM4I9a828G4i+KO
sxQcRHzuB0++K7/XdWg0XSJ7+5YARr8o7u3YD8a4j4U2E0smoa7cg7rhiiEj73OWP54FAHo9FFFABRRRQAUUUUAFFFFABRRRQAUU
UUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFADJiwhcp94KcfXFcl4NgguJLuS4RZ
JRj74zgHOf1rsK4u4EnhvxCZ1Um0nJOB3U9R9QeaAOdsivhL4pSwP+7s70kL2AV+V/JuK9WriviBoS+INCj1LTsSXNqpdCv/AC0T
uPqOtWPh94mXXdJFvcv/AKdagLICeXXs3+PvQA34mWFnP4VuLueBWuLfHkydCuWA/L2pPhdaR2/hCKZQN9xI7ufXBwP0Fb/iHTf7
Y0K80/IDTRkKT2bqP1ArB+G9lq2m6LNZarbGBYpj5O48kHr+Gen1oA6+ggEYNFFAHF6t4Bie+OoeH76TS7snJEedhP0HT+VVjonj
9h5B8QWwj6eYBhv/AEHNd7RQBzHhfwdb6HO99c3D3uoyZ3Tyds9cf4109FFAHlfxYiu11ewmupXfTGGFjU42sD834kdDXpWlwWtv
pltFYIEtljXygP7uMiuT+LMaN4TV2A3JcptP1Brd8Hu0nhLS2fkm2T+VAGzRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUU
AFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAVWv7GDULVre4XKnoR1U+oqzRQBxSSaj4WuSki+dZ
O3H90/T0PtXK69GNF1mPxP4bfEJfM8GMGJj1BH91v5/hXrsscc0bRyorowwVYZBrl9W8HxTK5sGChgQ0Mn3SPQHtQBpaV4k07UtB
OrrMscEa5mDHmIjqD/nmrOjavY63Yi806XzItxU5UggjsRXiOv6VqXh2WSzJmitrz+DPD4OcH1we9e0eGNKi0bQLSyjXDKgaQ/3n
PJNAGrRRRQAUUUUAFFFFAHmvxW1H7XNY6BaZknaQSOq9ieFH6k13+lWY0/SrWzHSCJY/yFcJ8U9GEKQeIrMmO5idUlZT1/ut9QeP
yrtfD2o/2toNlfnAaeIFsf3uh/UGgDRooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooA
KKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigDzD4rZGuaKW/1fP/oQzXpy4IBHTtXC/FnTHutBgvolJazky+OytwT+YFdH
4T1P+1/DdleEEM0e18/3l4P8qANiiiigAooooArajfW+m2E17dvshhUsx/w968zOveL/ABldSJoKNZ2SHG5Ttx/vP6+wrqfibb3F
x4On+zgt5ciSSAd0B5/ofwqD4d65pEvh610+KaKC6hXbJE5Clmzyw9c0Ac3efD3xPPaO0+sJcSEZ8lpnIb8TxWr8M9ddQ/hq/i8m
4tN3l5GCQD8yn3BP5V6CzKqFmYBQMkk8AV5Xpk0er/F57zTObeMszyL0YBNpP4mgD1WiiigAooooAKKKKACiiigAooooAKKKKACi
iigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAGyIkiMkiK6MMFWGQR9KSKK
OGJYoY1jjUYVVGAPoKfRQAUUUUAFFFFACMoZSrAEEYIPeuJ1r4aaTfyvPYSyWMrHO1BuTP07fga7eigDy9vhtrxUwHX1NueCpaTG
P93pXaeF/DFj4aszFbZknkwZZmHL+3sPatyigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiii
gAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAoo
ooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK
KKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKA
CiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiii
gAooooAKZPNHbwSTTOEjjUszHoAOSafWR4tt5rrwrqcNsC0rW7bQOp4zj8qAPPdQ8a+IvEeqNY+GIpIoudvlqPMYf3mY8KPy+tJN
p/xIsIzdfabuXb8xVbhZSP8AgPf8Ki+F2vaZpVxeW2oSJbtc7DHM/C8Z+Unt1zXrcM0U8YkgkSRD0ZGBH5igDiPAvjmTWbn+zNWV
EvcExyKNolx1BHZu/wCddJ4n16Pw7pQv5YHmXzFj2owB5zzz9Kz/APhB9LXxH/bkU11Hced52xWUJu78Yzg/XvXH/EvUtdke5sJ7
DZpUcyGK48phuO3+9nB5J/KgD0Lw3rcfiDSU1CKF4VZ2XYxBPB9q1a8j8Cax4lt4LKysdM83THuQHn8hmwCw3fMDjiu+8W+J7bwz
p6zSoZriUkQwg43EdST2AoA3qK8qi8W+OtRiN5Y6Xm2PK+XallI9iTk/hWz4P8ftq2oLper26W92xKo6AhWYfwkHoaAO8orkvH/i
W+8N2tnLYJAzTOyt5qk8AA8YIrnLjx9r1/b28OhWHnzrCrXMkUDSAOR90DsB70Aej6neLp+mXV66F1t4mlKg4JwM4rF8JeLYPFBu
hDaSQfZtmd7A53Z9PpWB4w1jxGmgW0cOn+ZDdaduvZDA37pivzd/lxz1ri/Bmq+INMN5/YGn/a/M2eb+5aTbjOOh46mgD3aq2oyP
Fpt1JG210hdlI7EKafZPLLYwSXCbJnjVpFxjaxHIx9ai1b/kE3n/AFwf/wBBNAHC/DLXtV1i/vo9SvZLhY4lZQwHBJ9hXoteH+Bv
ENv4cGpXUqGWZ4kSGEHBds/yrYu/Gfje2T7dPpYgtTyN9owQD3JOaAPWKK5nwZ4vg8TW8iNGIL2EZkiByCP7y+38qxvGXjPVND8S
RafZx2zQvGjEyIS2SSD0I9KAO/oorgPHPjPVPD+vQ2VjHbNE8CyEyoSclmHYj0oA3/G76zH4ekbQBIbneu7yhlwnOdvv0/Wl8Evr
L+HYm14SC63tt8wYcp23D16/pSeNdautC8Ptf2SxNKJEXEikjB+hFL4K1m617w+l9erEsrSOpEYIGAfcmgDkPAviLWNS8YTWd9fy
TW6pKQjAYBBGOgr06vHfht/yPtx/1zm/9CFeg+MPFVt4ZskZk866mz5UOcZx1JPYfzoA6GuA+KGuapoz6aNMvHt/NEm/aB82NuOo
9zWNaeM/G18DeWemCa2B6R2jMh9s5z+tZXjnxJD4jstLlEZguYDKk8JP3T8uCPY4P5UAenm41SfwNHc2DeZqUlijoxAyzlQSfTPX
8azfh5L4jktLv/hIRcbA6+QbhcP33e+OlXY72XTfh5Be24UywaajqHGRkIOtU/h/4mv/ABJb3sl+kCmB0C+UpHUHrkn0oA6+ivNr
r4hXlh4vubG9S3Gn280iMyxkyEAHAHOMk4FVb3xn4ynja+stHe3sfvK32Zn+X1LH+YGKAPU6K4nwN45OvznT9QijivApZGjyFkA6
8Hoe9b3ifxDa+HNMN3cgu7HbFEpwXb+g9TQBsUV5Nb+N/GWqyvNpenK8KHlYrZnA9i3rVu++Jd5HpC7LSK21WKcJPBMjEbcH5gMg
jkDg9KAPTqKyfCupT6v4cs9QuggmnUlggwvDEcflWpLv8pvKID4O0sOM9qAHUVwngjxlqOta5c6bqsVvG8cZZfKUg7lYAg5J9f0r
b8ba9L4e0Bry2EbXDyLHGJBkZPJ4HsDQB0FFcl4A8U3PiS1u/tywrcW7rxECAVI44JPcGmfEDxXdeG47JLBYXmnLFhKpICjHoR3P
6UAXvHL61H4eZtAEhuPMXf5Qy4TnO33zj8Kk8FvrD+HYW14OLrc2PMGH2dtw9f8A61Z+v+IdU0jwTaaqUt/t0vl+YrIdg3AkjGf6
1DZeJNbv/AX9r2dpFPqJlKrFHEzAgNg8Zz096ALEfji3fxWdA+wyiQTmHzd4xkd8V1MzFYHZTghSR+VeBR3+rr4yN8lnnVftBf7P
5Z+/zkbc5r17wxqGsajodzNrtn9kuFdlVPLKZXaOcE+pNAHL/DPxDq+r61dQ6lfSXEaW5dVYDg7gM8D3r0qvC/Auuw+H7y+vJUMs
jW/lwxL1kcuuBW5f+M/G1ov2y50wW1sTwHtWCj0BJOaAPWKK5zwb4rh8TWTsYxDdwECWIHI56MPb+VdHQAUUUUAFFFZHiqbUrbw7
d3Gjvtu4VDr8gbIB+YYPtmgDC8QfDjS9Vne5s5XsZ3JLbFDIx9dvb8DXJ3Hw88TaUxn0q7SUryPIlMT/AJHH863vBPj+O8SS18Q3
kcdzvzFM4CIy+mRwCD6+tdnPrWlW8Jmm1G0SMDO4zL/jQB554O8canDrEejeIC0m+TyVkkXEkb5wA3qM8c81vfFf/kUB/wBfKfyN
cNdTJ4o+JUc2lxnypbiMhsYJVAMv7cKTXc/FZSfB+QOlyhP60ASfC3/kTYf+u0n86t+L9P8ADMqRX3iVgqxgpGTKy574CqeTVD4V
3EL+ElhWRDJHM4dc8jJyOK5D4nSNJ41hhvXdbVI49uOyE/MR79fyoA6pviZ4bto1ighvHRAFUJEAAB0AyRXCXGq22rfEW21KwieG
Oa8gIVwAc5UE8epFer2ln4XstNWa3h0xLVVyJSEII9Sx615Tf6ha6n8R4LqxQLbNeQrHhdoIUqM498ZoA634xf8AIP0z/rq/8hXS
eAbWG18Haf5KBTLH5rkdWYnqf5fhXN/GL/kH6Z/12f8AkK6rwV/yJ+lf9e60AT+Kv+RV1b/r0l/9BNcL8Gvvav8ASH/2eu78UKW8
L6qqjJNpLx/wE15/8HbiGO51OB5FWSRY2RScFgN2cfmKAPVKqat/yCbz/rg//oJq3VTVv+QTef8AXB//AEE0AeS/CrTYL3xHJc3C
BxaRb0BGfnJwD+HNexyIkkbRyKGRgQysMgj0NeH/AA912HQtfL3h22twnlSPjhDnKk+3H617Be6/pNlYteT6hb+SFyCsgYt7ADqa
APK9Aj/sP4qfY7c4hFy8AH+wwOB/L8qk+J3/ACPFt/1wi/8AQjTPBizeIPiK+qFCsaSPcv8A7IOQo/Mj8jUvxYikg8U2t1j5Ht12
ntlWOR+o/OgD2CvHvix/yN1r/wBeqf8AobV6bp/iDStQ0+O9hvrcRsoZg0gBT1DA9CK8f8eazb634qM9m2+CFFhR+z4JJI9sk0Ae
hfFL/kTW/wCu8f8AWl+Fv/InRf8AXaT+dJ8Uv+RNb/rvH/Wl+Fv/ACJ0X/XaT+dAHHfDf/kfbj/rnN/6EKZ41B1f4mJp8jHy/Mht
h7KcE/qxp/w2/wCR9uP+uc3/AKEKPiJBPo/jqHVkQlZTHPGexZMAj9B+dAHr0EMVvAkECLHFGoVFUYCgdBXkvxb02C11m1vYUCNd
xt5mB1ZSOfrgj8q9K03xDpOpWCXdvfQBCuWDyBWT2YHpXk/xJ1+31vWYksW8y1tEKCUdHYnLEe3QUAegXv8AySs/9gpf/RYrD+Dn
/Hnqn/XSP+TVuXv/ACSs/wDYKX/0WKw/g5/x56p/10j/AJNQBzwtIb74tSW1wgeJr9yynocZOD+Ve0dBXj1j/wAlkb/r+l/k1exd
qAPGtPhSy+LoitwERb1wqjgAEHj9am+LNxJceJbSzB+SK3UqP9pmOT+gpsf/ACWM/wDX8f8A0Grfxd02WPULLVUU+W8fksw/hYEk
fmCfyoA9M0uwg0vToLK2QLFCgUYHX1P1PWvPfjBp0CxWOpogWZnMLsP4hjIz9MH866vw74t0vV9Lime9ghuAg86KSQKVbv16j3rg
vif4jtdWmt7DTpBNBasWklTlS54AB74Gfz9qAO7+H3/IkaZ/uN/6G1dHXOfD7/kSNM/3G/8AQ2ro6APJbhf+Ef8Ai/G4+WG5nDex
Eowf/HifyrQ+JrvqfiDRtBhPLsGYD1Zto/IA0nxds2ifTdWh4ZGMLN6H7y/+zVF4YnHif4lz6wATBbQ7kyOh2hQPzLGgA8Mxr4d+
KN7pSjZb3SsIl7YI3r+mRVbxoDrvxKs9KBykflxMPQH52P5H9Kv/ABJjOl+JdF1+MYCuFkI/2Gz+oJH4VW8BL/bfj7VNbOTHGXdC
R0LnC/8AjoNAG/8AFUAeD8AYH2iP+tTfC/8A5Eu3/wCusn/oVRfFb/kUP+3mP+tSfC//AJEu3/66yf8AoVAHFW//ACWI/wDX+/8A
I17Bcf8AHtJ/uH+VeO+bHa/F1pLh1jQX5yzHAGen8xXsMxDWshBBGw9PpQB498KLKG68UvLMgY21u0keR0bIGfyJr2G5t4ru2ltr
hA8UqlHU9CCMGvJvg/8A8jDef9eh/wDQ1r16gDx34WlrfxlcQKx2mCRT74Yf4V7FXjvw2/5Hy4/65Tf+hCvYqACiiigAooooA4zX
Phxo+qXD3Ns8ljM5y3lAFCfXaen4EVjx/CWMPmXWXZPRbcA/nur0uigDF8O+FtL8OxsLCJjM4w80hy7D09h7Cr+qadbatp01jepv
gmXDAHBHcEe4PNW6KAPPrH4YQ2Or217Dq0hSCZZRG0IydpBxkH29K6XxN4W07xJAi3geOaP/AFc0f3l9vce1blFAHndp8KLGO4D3
epTTxA58tYwmfqcmtS68AWE2vQanDcSQLA0RSBEG0BMYHr2rsKKAMDxX4Xh8TQW8U9zJAIGLAooOcjHetPSNPTStKtrCORpFt4wg
ZhgnFXKKAEdFdGR1DKwwQehFee3/AMKrKa5aSx1KW2jY5EbRiTb7A5HFeh0UAQ2cH2Wygtg2/wAmNU3YxnAxmluoRc2ssBYqJUZC
R2yMVLRQBxulfDvTLG3vLe4nlu4rpFUh1ClCDkMCOhrKb4TWpnJXV5hD2Uwgt+ecfpXo9FAGXoGg6f4fsvs2nxEBjl5GOXc+pNJ4
h8P2HiKx+y36N8p3RyIcMh9R/hWrRQB5xH8JrUTgy6vM0WeVWEK355P8q0tU+HGmX0tsbe4ltI7eERKiKDnBJ3EnqSTXa0UAZPiT
Q4vEGknT5p3hUur7kAJ4+tL4b0SPw/pK2EMzzKrs+5wAefpWrRQBynh/wRbaHrb6nFezSu6suxlAHzHPatrW9EsNdsTaajFvTOVZ
ThkPqD2rRooA83/4VNa+fkavN5Ofu+SN355x+la2pfDrSruws7O2mltY7XecqAzSM2Mlie/y12VFAGZNo0cvhr+xDM4j+zC38zA3
YC4zjpVLwn4Wg8MRXMcFzJOLhlJ3qBjGfT610FFAHKQ+CLaHxYdfF7MZTM0vlFRtyQeM9e9dX2oooA5RfBFsviv+3/ts3m+cZfK2
jbnGMZ61Y8Ya1o2m2iWmu2001vdqQAse5TjHfIweQa6OqOr6TY61YtZ6hCJImOR2Kn1B7GgDgrP4feG9ZiW90vVbg20nzbAVYp7H
IyD9ayvH0Gh6NpVpoejlWmWbzp2Dbm4Ugbj689O1a1z8J4/NJstYkjQ/wyQ7jj6gjP5VpaF8NNL025S5vp3v5EOVVlCJn1I5z+Jx
QBueCbaSz8IaZDMpVxDuIPUbiW/rW5RRQBl+I9Eg8QaS+n3EjRhmVw6gEqQff8R+NU/CfhS28MR3K288k7XBUszqAQBnA4+proKK
AMfxP4ft/EemCyuJGiCyCRXQAkEZHf2JqLwp4YtvDNrPDbzPMZnDs7gA8DAHH4/nW7RQBkeJtCi8RaV9gmneFfMV9yAE8Z9frT/D
mix6BpCafDM8yIzNucAHk57VqUUAcj4o8BWHiC9N8tw9pdMAHZVDK+OASOOfxq/4V8NDw7pE1h9rNz5shkL7NuMqBjGT6Vv0UAct
4V8FW3hq/lu4LyadpYvLKuoAHIOePpXU0UUAcp4f8EW2ha0+pxXs0rurLsZQB8xz2rq6KKACiiigAooooAKKKKACiiigAooooAKK
hvLuCxtJbq6kEcMKl3Y9gK80uviHrmrX7W/hrTMoM4JjMkhHqQOFoA9Rory23+IWvaRfLB4l0zCN1xGY3A9Rng/55r0qyvYL+xiv
LSQSQzJvRh3FAFiivLdG+J10Zbp9Yit/KihLRJCpVpJNwAXJJ4wSfwqK78ZeNljN+NK8iz6jNqxUD1JPP48UAer0VyXgrxrD4kD2
1xEtvfRruKKcrIvqv+Fa/iTxBaeHdNN3d5dmO2KJTzI3p7D1NAGtRXlUPjDxtq+660rSx9mB48u3Lj6bieT9K1/CvxAe+1FdK122
W1u2bYjgFQW/usp5U0Ad9RXN+Otdu/D2hpe2KxNI06xkSqSMEE9iPSuWi+Ier32m29vpenLd6q4ZpvLiYpENxAwM8nGD1xQB6bRX
kqfELxLpOoLHrlguw8tE8JifHqp//XXqWn3sGpWEF7atuhnQOh9j6+9AFiiivOdT8f3um+MptOuFtl0+GYK7eWS+3GT36/hQB6NR
XleoeNfF8kbahaaS1tp33lZrdnG31LH+YwK0rD4mwSaBLPdWw/tKNhGlvGTiUnoR3A4569vWgD0KivJr7xr41sdt5eaatvbMeBJa
sF+mSc/rXeeEfEsPiXSzcJH5U8TbJos52nsQfQ0AbtFcn4y8bW/hsrawRC5vnXdsJwqD1Y/0rlB4t8dm3+3jS/8ARsbs/ZG249eu
ce9AHq9Fcj4N8cW/iNzaXEQtr5VLBAcrIB1K+/tWz4j1608PaW17d5Y52xxqeZG9P/r0Aatcl8SdUvtJ8PQ3GnXLQStcqhZQORtY
45+grkoPHPjDVpnk0nTleJDysVuZAPYt6/lVfxZ4sOu+GBY39sbTU7a6QyREEBhtbkA8jqOD60AeheBL661LwlZ3d9M008hfc7Yy
cOQOnsK6CuW+Gv8AyI9h9ZP/AEY1Y3iX4hTQ6k2leHLVbq4VijSFS4Ldwqjr9aAPQqD0ryuTxn4z0YpPrGlg27HB8yAp+G4dD9a9
B8P65aeINLS+syQD8ro33o27g0AcKviLWD8T/wCzDfyfYvtZTycDG3HTpmvTR0rx5P8Aksf/AG/H/wBBr0Lxb4nt/DOnJNJGZriY
lYYgcbiOpJ7AUAb9FeXQ+JvH2owfbrLS0+zNyuy3yGHtk5P4VueDfHX9t3h0zU7dba/AO3bkK5HUYPII9PrQB2tFFFABRRRQAUUU
UAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAcH8XLuSHw9bWqHAuLj5/cKM4/PH5Vo/DbTobLwjbSoo826zLI2OTyQB+AH86rfFLTJ
b7wwLiBSzWcolYAZ+TBBP4cH8Kr/AA08SWU2gxaVcXEcV1a5VVdgvmITkEZ64zjHtQBqfEXT4b7whdvIgMlsPOjbHKkHn8xmsf4R
3ckug3tq7ZW3mynsGHT8wfzqf4k+JLK30CbTILiOW7ugE2IwbYucknHTpj8ad8LNMlsvDMl1MpVryTegIx8gGAfx5oA4f4babDqP
i5PtCB0to2n2sMgkEAfqc/hXtxAKkEZB6g14N4J1qPQvE0d3cZ+zuGimYDO1T3/AgV7TLrukw2RvH1K1Fvt3bxKCCPbHX6UAeUPC
nh/4sRw2g8uIXiBVHQLIBkfTDEVN8VrszeKoLWVm8i3hXhevzHJP1xj8qh0d38VfE5b6KNhELgXByPuxpjbn8lH41ofFaxmtNfs9
XRN0UiKpJGQHQ5wfqP5GgDWtvib4ftLaO2t9PvkhiUKihEwAP+BVxnjbX9O17VLfUNMgngmVNsrSBQWIPyngnn/61esaNfeH9YsI
7q1Sy+ZQXQqgaM9wRWDrnjDQtN1NLCy0uDUZW4byQmAxOAucHJ+lAEPxHuDd/D/T7liCZpYZDj3jY1f+FlpDB4RjuEQCW5kdpG7n
BKgfp+tV/iqMeDrcbBHi5j+QdF+VuK0Phn/yJFl/vSf+hmgDP+LdtFJ4ZhuGUeZFcqFbuAQcj9B+VXvhi7N4JtAxzteQD6bzVf4r
/wDIoD/r5T+Rqb4X/wDIlW3/AF0k/wDQjQB1x6V4xqVtFd/FtredQ8T3qBlPQjA4r2c9K8euP+SyD/r+T+QoA9fZVZCjKCpGCCOC
K8a+H9pA3xAZGjBW381owecEHA/LNez9q8f+Hv8AyUO5/wB2f/0KgD07xHDHceHNSilUMhtZOD6hSQfzFeffBtj9p1Vc8FIjj8Wr
0XXf+QDqP/XrL/6Ca85+Dn/H3qv/AFzi/m1AGZoUS+IPifJJegSR+fJKVIyCEztH04X8q9nrxdpG8HfEt57pW+zGZmzjrFJnkeuM
/pXrQ1jTDZ/bBqFr9n27vM81cYoA8m8TRJoHxLjnsgI1MsU4UDgbvvD6Hn86ufGC6d9asbPPyRW5kA92Yj+SiqU0p8ZfEmN7NWNt
5qYbHSJMZY+mefzFavxg0+QXVjqaqTGUMDn+6QSw/PJ/KgD0XRNOh0rSLayt0CpFGAcfxHHJPuTXC/F/TYfsNnqioBMJfIdh1ZSC
Rn6YP510/hfxRp2saPBIbqGO5RAs0TuFZWA5OD2PXNcR8UvEVpqIt9LsJVnSB/MmkQ5UNjAUHv1NAG54au3sPhI11EcSRQTlD6He
2P1rivAviLSvDlzc3WoW1xNcSKEiaNVOwfxdSOvH5V3XhCyOpfCxbEHDTwzop9CXbH61y3w41Gw03UrzS9bjhiaVgEadR8jrkFST
0z/SgDb1D4k+H9QsJ7O4sL5op0KMCid/+BVl/B+6ddWv7Pd8kkAlx7qwGf8Ax6u+1S78P6VYvd3a2Soq5UBEJc+gHc1n+DPENt4g
e5ktNHFmkICmUFfmJ/h4A9M/lQBxKf8AJY/+34/+g1vfFfRb2+trPULSJ5ktgyyooyVBwQ2PTjn8KwU/5LH/ANvx/wDQa7fxZ4yH
hi6t4ZdOknSdCyyLIFGQcEdPp+dAHP8Ah74mWEGn21nqlpNE8Max+ZCAykAYzjgj9a3tItvCOt6yda0xklv1bzWIkdWBxjJQ/wCF
XTpvhnxHZLetZ2dxHKu7zQArD6kYINeX20EOm/Ey3t/D87SwJdoisrbvlON657gfMPwoA9vooooAKKKKACiiigAooooAKKKKACii
igAooooAKKKKACiiigBGVXUqwBUjBBHBrhtX+GGlXtw01jcS2Jc5MaqHQH2HBH513VFAHB6T8L9Ks7hZr+5lvdpBEZUIhPuOSfzr
ugqpFsRQqqMAAYAFOoIyCKAPFfhpY22p67f2V7EJYJbJwyn/AH059j710k3wntGuC0GqzRw54RogzD8cj+VbPhXwPD4b1SS+jv5L
gyRGLa0YXGSDnr7V1tAGP4c8Nad4ctmisUYySY8yaQ5d/wDAe1XtS0601Wyks7+FZoJByp/mD2PvVqigDzm6+E9m8pa01WaKMnhZ
Ig5H45FbnhvwHpWg3C3eXu7tfuySgAJ7qo6H35rqqKAMfxRoEXiPTEsZp3hVZRJuQAngEY5+tS+HtHj0HR4dOileVIixDsACcknt
9a06KAMjxNoUXiLSxYTTvCvmCTcgBPGfX60/w5osegaRHp8MzzIjMwZwAeTntWpRQAVykngi2fxYNfN7MJfOEvlbRtyB0z1rq6KA
CuV0LwTbaLrsmqxXs0ruHBRlAHzHPauqooAhvbcXdjcWrMVE0bRlh1GRjP61geE/CFv4YluZILuWc3CqCHUDGM+n1rpaKAMjxD4b
03xFbrFqER3p/q5UOHT6H09jXG/8Klt/Nz/bEvl/3fIGfzz/AEr0migDH8O+GdM8OwNHYRkyP/rJpDl3/HsPYVf1GwtdTspLO+hW
WCUYZT/Meh96s0UAec3Hwns2nLW2qzRxE/ceIOQPrkfyrUl+HWlf2ENMglmiJlWWSfAZ5CAQAewHzHgV2VFAGb4f0iPQtGh02KVp
Uh3YdgATlie31rI8SeBdK1+c3TF7W7b70sQGH/3gev1rqaKAPObb4T2iTBrnVZpY8/dSIISPqSf5V3emabZ6TYpZ2EKwwp0A7n1J
7n3q3RQByg8EWw8V/wBv/bZvN87zfK2jbnHTPWtrW9EsNdsTaajFvTOVYHDIfUHtWjRQB5vJ8J4PMPk6zMkZ/haEE4+oI/lXSeGf
Bel+HXM8Iee7Ix50uMqO4UDgfzrpKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiig
AooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooo
oAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKK
KKACiiigAooooAKKKKACiiigAooooAKKKKAP/9k=`

73
video/video.go Normal file
View File

@@ -0,0 +1,73 @@
// Package video extracts frames and audio from video files.
//
// Video files are decomposed into JPEG image frames and a WAV audio track.
// The frames and audio can then be fed into a multimodal model's vision
// and audio encoders respectively.
//
// Platform-specific implementations:
// - macOS: AVFoundation (system framework, zero external deps)
// - Windows: Media Foundation (pure Go via syscall, zero external deps)
// - Linux: shells out to ffmpeg (must be installed)
package video
import (
	"fmt"
	"net/http"
	"slices"
	"strings"
)
// Result holds extracted frames and optional audio from a video file.
// It is produced by Extract and consumed by callers that feed multimodal
// model inputs.
type Result struct {
	Frames [][]byte // JPEG-encoded image frames in temporal order
	Audio  []byte   // WAV 16kHz mono audio (nil if no audio track)
}
// Options controls frame extraction behavior.
type Options struct {
	MaxFrames    int  // Max frames to extract (0 = default 4; values above 64 are capped by Extract)
	ExtractAudio bool // Whether to extract the audio track
}
// Extract reads a video file and returns extracted frames and audio.
// The MaxFrames option is normalized before dispatching to the
// platform-specific extractor: non-positive values fall back to the
// default, and values above the cap are clamped.
func Extract(path string, opts Options) (*Result, error) {
	const (
		defaultMaxFrames = 4
		maxFramesCap     = 64
	)
	switch {
	case opts.MaxFrames <= 0:
		opts.MaxFrames = defaultMaxFrames
	case opts.MaxFrames > maxFramesCap:
		opts.MaxFrames = maxFramesCap
	}
	return extract(path, opts)
}
// IsVideo returns true if the content type sniffed from data indicates a
// video file. Any non-empty amount of leading data may be passed;
// http.DetectContentType considers at most the first 512 bytes.
func IsVideo(data []byte) bool {
	if len(data) == 0 {
		return false
	}
	// DetectContentType sniffs at most 512 bytes; trim only when we have
	// more than that. Previously inputs shorter than 512 bytes were
	// rejected outright, which misclassified small but valid video headers.
	if len(data) > 512 {
		data = data[:512]
	}
	return IsVideoContentType(http.DetectContentType(data))
}

// IsVideoContentType returns true if the MIME type is a video type.
func IsVideoContentType(contentType string) bool {
	return strings.HasPrefix(contentType, "video/")
}
// VideoExtensions lists recognized video file extensions.
var VideoExtensions = []string{
	".mp4", ".webm", ".mov", ".avi", ".mkv", ".m4v", ".wmv", ".flv",
}

// IsVideoExtension returns true if the extension (with dot) is a video
// format. Matching is case-insensitive.
func IsVideoExtension(ext string) bool {
	return slices.Contains(VideoExtensions, strings.ToLower(ext))
}
// ErrFFmpegNotFound is returned on Linux when ffmpeg is not installed.
// It is a sentinel value, so callers may detect it with errors.Is; the
// message carries install hints for common distros.
var ErrFFmpegNotFound = fmt.Errorf("video support requires ffmpeg; install it with: sudo apt install ffmpeg (Debian/Ubuntu) or sudo dnf install ffmpeg (Fedora/RHEL)")

118
video/video_darwin.go Normal file
View File

@@ -0,0 +1,118 @@
package video
/*
#cgo LDFLAGS: -framework AVFoundation -framework CoreMedia -framework CoreGraphics -framework CoreVideo -framework Foundation -framework ImageIO -framework UniformTypeIdentifiers
#include <stdlib.h>
#include <stdint.h>
// Extract frames from a video file using AVFoundation.
// Returns JPEG data for each frame concatenated, with offsets/sizes in the out arrays.
// Audio is extracted as 16kHz mono PCM (int16).
int extract_video_frames(
const char* path,
int max_frames,
int extract_audio,
// Frame output: caller provides buffers, function fills them
uint8_t** frame_data, // out: array of frame JPEG pointers (caller frees each)
int* frame_sizes, // out: array of frame JPEG sizes
int* num_frames, // out: actual number of frames extracted
// Audio output
uint8_t** audio_data, // out: PCM int16 data (caller frees)
int* audio_size // out: PCM data size in bytes
);
void free_ptr(void* p);
*/
import "C"
import (
"bytes"
"encoding/binary"
"fmt"
"unsafe"
)
// extract implements Extract on macOS via the AVFoundation bridge in
// video_darwin.m. Frame JPEGs and the PCM audio buffer are malloc'd on
// the C side, copied into Go memory here, and released with free_ptr.
func extract(path string, opts Options) (*Result, error) {
	cPath := C.CString(path)
	defer C.free(unsafe.Pointer(cPath))
	maxFrames := C.int(opts.MaxFrames)
	extractAudio := C.int(0)
	if opts.ExtractAudio {
		extractAudio = 1
	}
	// Allocate output arrays sized to MaxFrames; the C side reports how
	// many entries it actually filled via numFrames. Assumes the C
	// function never writes more than max_frames entries.
	frameData := make([]*C.uint8_t, opts.MaxFrames)
	frameSizes := make([]C.int, opts.MaxFrames)
	var numFrames C.int
	var audioData *C.uint8_t
	var audioSize C.int
	rc := C.extract_video_frames(
		cPath,
		maxFrames,
		extractAudio,
		(**C.uint8_t)(unsafe.Pointer(&frameData[0])),
		(*C.int)(unsafe.Pointer(&frameSizes[0])),
		&numFrames,
		&audioData,
		&audioSize,
	)
	if rc != 0 {
		// NOTE(review): on a non-zero rc, any frames the C side already
		// allocated are not freed here — presumably it allocates nothing
		// before failing; confirm against video_darwin.m.
		return nil, fmt.Errorf("video extraction failed (code %d)", rc)
	}
	result := &Result{}
	// Copy frame data to Go slices and free C memory
	for i := 0; i < int(numFrames); i++ {
		if frameData[i] != nil && frameSizes[i] > 0 {
			size := int(frameSizes[i])
			data := C.GoBytes(unsafe.Pointer(frameData[i]), C.int(size))
			result.Frames = append(result.Frames, data)
			C.free_ptr(unsafe.Pointer(frameData[i]))
		}
	}
	// Copy audio data and wrap in WAV header (C side emits raw 16kHz
	// mono int16 PCM).
	if audioData != nil && audioSize > 0 {
		pcm := C.GoBytes(unsafe.Pointer(audioData), audioSize)
		C.free_ptr(unsafe.Pointer(audioData))
		result.Audio = wrapPCMAsWAV(pcm, 16000, 1, 16)
	}
	return result, nil
}
// wrapPCMAsWAV wraps raw PCM int16 data in a WAV header.
func wrapPCMAsWAV(pcm []byte, sampleRate, channels, bitsPerSample int) []byte {
var buf bytes.Buffer
dataSize := len(pcm)
fileSize := 36 + dataSize
// RIFF header
buf.WriteString("RIFF")
binary.Write(&buf, binary.LittleEndian, int32(fileSize))
buf.WriteString("WAVE")
// fmt chunk
buf.WriteString("fmt ")
binary.Write(&buf, binary.LittleEndian, int32(16)) // chunk size
binary.Write(&buf, binary.LittleEndian, int16(1)) // PCM format
binary.Write(&buf, binary.LittleEndian, int16(channels))
binary.Write(&buf, binary.LittleEndian, int32(sampleRate))
byteRate := sampleRate * channels * bitsPerSample / 8
binary.Write(&buf, binary.LittleEndian, int32(byteRate))
blockAlign := channels * bitsPerSample / 8
binary.Write(&buf, binary.LittleEndian, int16(blockAlign))
binary.Write(&buf, binary.LittleEndian, int16(bitsPerSample))
// data chunk
buf.WriteString("data")
binary.Write(&buf, binary.LittleEndian, int32(dataSize))
buf.Write(pcm)
return buf.Bytes()
}

154
video/video_darwin.m Normal file
View File

@@ -0,0 +1,154 @@
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreMedia/CoreMedia.h>
#import <ImageIO/ImageIO.h>
#import <UniformTypeIdentifiers/UniformTypeIdentifiers.h>
#import <Foundation/Foundation.h>
#include <stdlib.h>
#include <stdint.h>
// free_ptr releases buffers handed to Go (frame JPEG buffers and the
// PCM audio buffer malloc'd in extract_video_frames).
void free_ptr(void* p) {
    free(p);
}
// Convert a CGImage to JPEG data (lossy quality 0.85).
// Returns nil if the destination cannot be created or encoding fails.
static NSData* cgImageToJPEG(CGImageRef image) {
    NSMutableData *data = [NSMutableData data];
    CGImageDestinationRef dest = CGImageDestinationCreateWithData(
        (__bridge CFMutableDataRef)data, (__bridge CFStringRef)UTTypeJPEG.identifier, 1, NULL);
    if (!dest) return nil;
    NSDictionary *props = @{(__bridge NSString *)kCGImageDestinationLossyCompressionQuality: @(0.85)};
    CGImageDestinationAddImage(dest, image, (__bridge CFDictionaryRef)props);
    // Finalize reports whether encoding succeeded; on failure the data
    // buffer is not guaranteed to contain valid JPEG, so return nil
    // (callers already skip nil/empty frames).
    bool ok = CGImageDestinationFinalize(dest);
    CFRelease(dest);
    if (!ok) return nil;
    return data;
}
// extract_video_frames decodes up to max_frames evenly spaced JPEG
// frames — and, when extract_audio is non-zero, a 16kHz mono int16 PCM
// audio track — from the video at path. Frame buffers and the audio
// buffer are malloc'd here and must be released by the caller via
// free_ptr. Returns 0 on success, -1 if the asset has no valid
// duration. Assumes frame_data/frame_sizes have room for max_frames
// entries (the Go caller allocates exactly MaxFrames).
int extract_video_frames(
    const char* path,
    int max_frames,
    int extract_audio,
    uint8_t** frame_data,
    int* frame_sizes,
    int* num_frames,
    uint8_t** audio_data,
    int* audio_size)
{
    @autoreleasepool {
        // Initialize outputs so early returns leave them well-defined.
        *num_frames = 0;
        *audio_size = 0;
        *audio_data = NULL;
        NSString *filePath = [NSString stringWithUTF8String:path];
        NSURL *fileURL = [NSURL fileURLWithPath:filePath];
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:fileURL options:nil];
        // Get video duration
        CMTime duration = asset.duration;
        if (CMTIME_IS_INVALID(duration) || CMTimeGetSeconds(duration) <= 0) {
            return -1;
        }
        Float64 durationSecs = CMTimeGetSeconds(duration);
        // Create image generator with exact-time tolerances so frames
        // land where requested rather than at the nearest keyframe.
        AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc]
            initWithAsset:asset];
        generator.appliesPreferredTrackTransform = YES;
        generator.requestedTimeToleranceBefore = kCMTimeZero;
        generator.requestedTimeToleranceAfter = kCMTimeZero;
        // Calculate frame times evenly spaced across duration; for short
        // clips, never request more frames than whole seconds.
        int frameCount = max_frames;
        if (durationSecs < frameCount) {
            frameCount = (int)durationSecs;
        }
        if (frameCount < 1) frameCount = 1;
        // Extract frames using synchronous API.
        // Note: copyCGImageAtTime: is deprecated in macOS 15 in favor of the
        // async generateCGImagesAsynchronouslyForTimes:, but the async API
        // is incompatible with CGo (callbacks on arbitrary threads). The sync
        // API remains functional and is the safest approach for CGo callers.
        int extracted = 0;
        for (int i = 0; i < frameCount; i++) {
            // Frame i sits at fraction i/frameCount of the clip (first
            // frame at t=0); timescale 600 is the CoreMedia convention.
            Float64 t = (durationSecs * i) / frameCount;
            CMTime requestTime = CMTimeMakeWithSeconds(t, 600);
            CMTime actualTime;
            NSError *error = nil;
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            CGImageRef cgImage = [generator copyCGImageAtTime:requestTime
                                                   actualTime:&actualTime
                                                        error:&error];
#pragma clang diagnostic pop
            // Skip (rather than fail) frames that cannot be decoded.
            if (!cgImage) continue;
            NSData *jpegData = cgImageToJPEG(cgImage);
            CGImageRelease(cgImage);
            if (!jpegData || jpegData.length == 0) continue;
            // Hand ownership of a malloc'd copy to the Go caller.
            uint8_t *buf = (uint8_t *)malloc(jpegData.length);
            memcpy(buf, jpegData.bytes, jpegData.length);
            frame_data[extracted] = buf;
            frame_sizes[extracted] = (int)jpegData.length;
            extracted++;
        }
        *num_frames = extracted;
        // Extract audio if requested.
        if (extract_audio) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            NSArray<AVAssetTrack *> *audioTracks =
                [asset tracksWithMediaType:AVMediaTypeAudio];
#pragma clang diagnostic pop
            if (audioTracks.count > 0) {
                NSError *error = nil;
                AVAssetReader *reader = [[AVAssetReader alloc]
                    initWithAsset:asset error:&error];
                // Audio is best-effort: any failure below simply leaves
                // *audio_data NULL rather than failing the whole call.
                if (reader) {
                    NSDictionary *settings = @{
                        AVFormatIDKey: @(kAudioFormatLinearPCM),
                        AVSampleRateKey: @(16000),
                        AVNumberOfChannelsKey: @(1),
                        AVLinearPCMBitDepthKey: @(16),
                        AVLinearPCMIsFloatKey: @(NO),
                        AVLinearPCMIsBigEndianKey: @(NO),
                    };
                    AVAssetReaderTrackOutput *output =
                        [[AVAssetReaderTrackOutput alloc]
                            initWithTrack:audioTracks[0]
                            outputSettings:settings];
                    [reader addOutput:output];
                    if ([reader startReading]) {
                        NSMutableData *pcmData = [NSMutableData data];
                        CMSampleBufferRef sampleBuffer;
                        while ((sampleBuffer = [output copyNextSampleBuffer])) {
                            // NOTE(review): CMSampleBufferGetDataBuffer can
                            // return NULL for some buffers — confirm this
                            // cannot occur for LinearPCM track output.
                            CMBlockBufferRef blockBuffer =
                                CMSampleBufferGetDataBuffer(sampleBuffer);
                            size_t length = CMBlockBufferGetDataLength(blockBuffer);
                            uint8_t *tmp = (uint8_t *)malloc(length);
                            CMBlockBufferCopyDataBytes(blockBuffer, 0, length, tmp);
                            [pcmData appendBytes:tmp length:length];
                            free(tmp);
                            CFRelease(sampleBuffer);
                        }
                        if (pcmData.length > 0) {
                            // Hand a malloc'd copy of the PCM to Go.
                            *audio_data = (uint8_t *)malloc(pcmData.length);
                            memcpy(*audio_data, pcmData.bytes, pcmData.length);
                            *audio_size = (int)pcmData.length;
                        }
                    }
                }
            }
        }
        return 0;
    }
}

View File

@@ -0,0 +1,48 @@
package video
import (
"os"
"testing"
)
// TestExtractRealVideo exercises the platform extractor end-to-end
// against a sample clip, verifying JPEG frame magic and the WAV header.
func TestExtractRealVideo(t *testing.T) {
	// Locate the sample video; skip when absent (CI environments).
	var testVideo string
	for _, candidate := range []string{".tmp/test_video.mp4", "../.tmp/test_video.mp4"} {
		if _, err := os.Stat(candidate); err == nil {
			testVideo = candidate
			break
		}
	}
	if testVideo == "" {
		t.Skip("test video not available")
	}

	result, err := Extract(testVideo, Options{MaxFrames: 2, ExtractAudio: true})
	if err != nil {
		t.Fatalf("Extract failed: %v", err)
	}

	switch n := len(result.Frames); {
	case n == 0:
		t.Fatal("no frames extracted")
	case n > 2:
		t.Errorf("expected at most 2 frames, got %d", n)
	}

	// Every frame must start with the JPEG SOI marker (FF D8).
	for i, frame := range result.Frames {
		if len(frame) < 2 || frame[0] != 0xFF || frame[1] != 0xD8 {
			t.Errorf("frame %d is not valid JPEG (first bytes: %x)", i, frame[:min(4, len(frame))])
		}
	}

	// The audio track must come back wrapped as RIFF/WAVE.
	switch {
	case result.Audio == nil:
		t.Error("expected audio but got nil")
	case len(result.Audio) < 44:
		t.Error("audio WAV too short")
	case string(result.Audio[:4]) != "RIFF" || string(result.Audio[8:12]) != "WAVE":
		t.Error("audio is not valid WAV")
	}
	t.Logf("Extracted %d frames, audio %d bytes", len(result.Frames), len(result.Audio))
}

162
video/video_linux.go Normal file
View File

@@ -0,0 +1,162 @@
package video
import (
"bytes"
"fmt"
"os/exec"
"strconv"
"strings"
)
// extract implements Extract on Linux by shelling out to ffmpeg. Frames
// are sampled evenly across the clip as JPEGs; the audio track is
// optionally transcoded to 16kHz mono WAV (best-effort).
func extract(path string, opts Options) (*Result, error) {
	// Check ffmpeg is available
	ffmpeg, err := exec.LookPath("ffmpeg")
	if err != nil {
		return nil, ErrFFmpegNotFound
	}
	// Probe video duration
	duration, err := probeDuration(path)
	if err != nil {
		return nil, fmt.Errorf("failed to probe video: %w", err)
	}
	// Guard against zero/negative durations: the fps computation below
	// would otherwise divide by zero, producing an Inf/NaN value and an
	// invalid "fps=" filter argument for ffmpeg.
	if duration <= 0 {
		return nil, fmt.Errorf("invalid video duration %.3fs", duration)
	}
	// Never request more frames than whole seconds of video.
	frameCount := opts.MaxFrames
	if duration < float64(frameCount) {
		frameCount = int(duration)
	}
	if frameCount < 1 {
		frameCount = 1
	}
	// Calculate FPS to get evenly spaced frames
	fps := float64(frameCount) / duration
	result := &Result{}
	// Extract frames as JPEG via pipe
	args := []string{
		"-i", path,
		"-vf", fmt.Sprintf("fps=%.4f", fps),
		"-frames:v", strconv.Itoa(frameCount),
		"-f", "image2pipe",
		"-c:v", "mjpeg",
		"-q:v", "5",
		"pipe:1",
	}
	cmd := exec.Command(ffmpeg, args...)
	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg frame extraction failed: %s", stderr.String())
	}
	// Split JPEG frames from the pipe output (each starts with FFD8, ends with FFD9)
	result.Frames = splitJPEGs(stdout.Bytes())
	// Extract audio if requested; failures are deliberately swallowed so
	// a silent or audio-less video still yields frames.
	if opts.ExtractAudio {
		audio, err := extractAudio(ffmpeg, path)
		if err == nil && len(audio) > 44 { // WAV header is 44 bytes
			result.Audio = audio
		}
	}
	return result, nil
}
// probeDuration uses ffprobe (or ffmpeg) to get the video duration in seconds.
func probeDuration(path string) (float64, error) {
ffprobe, err := exec.LookPath("ffprobe")
if err != nil {
// Fall back to ffmpeg -i which prints duration to stderr
ffmpeg, _ := exec.LookPath("ffmpeg")
cmd := exec.Command(ffmpeg, "-i", path)
var stderr bytes.Buffer
cmd.Stderr = &stderr
cmd.Run() // Ignore error — ffmpeg -i always exits non-zero
return parseDurationFromFFmpeg(stderr.String())
}
cmd := exec.Command(ffprobe,
"-v", "quiet",
"-show_entries", "format=duration",
"-of", "csv=p=0",
path,
)
var stdout bytes.Buffer
cmd.Stdout = &stdout
if err := cmd.Run(); err != nil {
return 0, err
}
return strconv.ParseFloat(strings.TrimSpace(stdout.String()), 64)
}
// parseDurationFromFFmpeg extracts the clip duration from `ffmpeg -i`
// stderr output, which contains a line like
// "Duration: 00:01:30.50, start: 0.000000, bitrate: ...".
func parseDurationFromFFmpeg(output string) (float64, error) {
	idx := strings.Index(output, "Duration: ")
	if idx < 0 {
		return 0, fmt.Errorf("could not find duration in ffmpeg output")
	}
	durStr := output[idx+10:]
	if commaIdx := strings.Index(durStr, ","); commaIdx > 0 {
		durStr = durStr[:commaIdx]
	}
	durStr = strings.TrimSpace(durStr)
	// Parse HH:MM:SS.ss. Propagate malformed components (e.g. ffmpeg's
	// "Duration: N/A") as errors instead of silently treating them as 0.
	parts := strings.Split(durStr, ":")
	if len(parts) != 3 {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	hours, err := strconv.ParseFloat(parts[0], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	mins, err := strconv.ParseFloat(parts[1], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	secs, err := strconv.ParseFloat(parts[2], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	return hours*3600 + mins*60 + secs, nil
}
// extractAudio transcodes the audio track of path to 16kHz mono WAV,
// returned via ffmpeg's stdout pipe.
func extractAudio(ffmpeg, path string) ([]byte, error) {
	var stdout, stderr bytes.Buffer
	cmd := exec.Command(ffmpeg,
		"-i", path,
		"-ar", "16000", // resample to 16kHz
		"-ac", "1", // downmix to mono
		"-f", "wav",
		"pipe:1",
	)
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg audio extraction failed: %s", stderr.String())
	}
	return stdout.Bytes(), nil
}
// splitJPEGs splits concatenated JPEG data into individual images.
// Each image runs from an SOI marker (FF D8) to the next EOI (FF D9).
func splitJPEGs(data []byte) [][]byte {
	var (
		frames [][]byte
		start  = -1
	)
	for i := 0; i+1 < len(data); i++ {
		switch {
		case data[i] == 0xFF && data[i+1] == 0xD8:
			start = i
		case data[i] == 0xFF && data[i+1] == 0xD9 && start >= 0:
			frames = append(frames, data[start:i+2])
			start = -1
		}
	}
	return frames
}

131
video/video_test.go Normal file
View File

@@ -0,0 +1,131 @@
package video
import (
"net/http"
"testing"
)
// TestIsVideo verifies that sniffing rejects non-video and undersized
// inputs. (Positive MP4 detection is exercised separately, since it
// depends on the Go version's content-sniffing table.)
func TestIsVideo(t *testing.T) {
	tests := []struct {
		name string
		data []byte
		want bool
	}{
		{
			// Fixed: this case was named "mp4 header" with a comment
			// claiming mp4 magic would be filled in, but the buffer was
			// (and is) all zeros — it tests the no-magic path.
			name: "zeroed data",
			data: make([]byte, 512),
			want: false,
		},
		{
			name: "jpeg data",
			data: append([]byte{0xFF, 0xD8, 0xFF, 0xE0}, make([]byte, 508)...),
			want: false,
		},
		{
			name: "wav data",
			data: append([]byte("RIFF"), append(make([]byte, 4), []byte("WAVE")...)...),
			want: false,
		},
		{
			name: "too short",
			data: []byte{0x00, 0x01, 0x02},
			want: false,
		},
		{
			name: "nil data",
			data: nil,
			want: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := IsVideo(tt.data)
			if got != tt.want {
				t.Errorf("IsVideo() = %v, want %v", got, tt.want)
			}
		})
	}
}
// TestIsVideoContentType checks classification of MIME types by their
// "video/" prefix.
func TestIsVideoContentType(t *testing.T) {
	cases := []struct {
		ct   string
		want bool
	}{
		{"video/mp4", true},
		{"video/webm", true},
		{"video/quicktime", true},
		{"video/x-msvideo", true},
		{"image/jpeg", false},
		{"audio/wave", false},
		{"application/octet-stream", false},
		{"", false},
	}
	for _, tc := range cases {
		t.Run(tc.ct, func(t *testing.T) {
			if got := IsVideoContentType(tc.ct); got != tc.want {
				t.Errorf("IsVideoContentType(%q) = %v, want %v", tc.ct, got, tc.want)
			}
		})
	}
}
// TestIsVideoExtension covers recognized extensions, case-insensitive
// matching, and rejection of non-video extensions.
func TestIsVideoExtension(t *testing.T) {
	cases := []struct {
		ext  string
		want bool
	}{
		{".mp4", true},
		{".MP4", true},
		{".webm", true},
		{".mov", true},
		{".avi", true},
		{".mkv", true},
		{".m4v", true},
		{".jpg", false},
		{".png", false},
		{".wav", false},
		{".txt", false},
		{"", false},
	}
	for _, tc := range cases {
		t.Run(tc.ext, func(t *testing.T) {
			if got := IsVideoExtension(tc.ext); got != tc.want {
				t.Errorf("IsVideoExtension(%q) = %v, want %v", tc.ext, got, tc.want)
			}
		})
	}
}
// TestExtractDefaults covers the Options zero value and the error path
// for a missing input file.
func TestExtractDefaults(t *testing.T) {
	// Test that Options defaults are applied correctly
	var opts Options
	if opts.MaxFrames != 0 {
		t.Errorf("zero value MaxFrames should be 0, got %d", opts.MaxFrames)
	}
	// Extract with a non-existent file should error
	if _, err := Extract("/nonexistent/video.mp4", Options{}); err == nil {
		t.Error("expected error for non-existent file")
	}
}
// TestIsVideoWithRealContentType logs how http.DetectContentType
// classifies a synthetic MP4 ftyp box. It is deliberately informational
// (no assertion): ftyp sniffing varies across Go versions.
func TestIsVideoWithRealContentType(t *testing.T) {
	// Verify that http.DetectContentType correctly identifies video types
	// by testing with real file magic bytes
	// MP4 ftyp box: 4 bytes size + "ftyp" + brand
	box := append([]byte{0x00, 0x00, 0x00, 0x20}, []byte("ftypisom")...) // size=32, ftyp + brand
	box = append(box, 0x00, 0x00, 0x02, 0x00)                           // minor version
	mp4Data := make([]byte, 512)
	copy(mp4Data, box)
	ct := http.DetectContentType(mp4Data)
	t.Logf("MP4 content type: %s", ct)
	// Note: Go's http.DetectContentType may or may not detect MP4.
	// The ftyp box detection depends on the Go version.
}

155
video/video_windows.go Normal file
View File

@@ -0,0 +1,155 @@
package video
import (
"bytes"
"fmt"
"os/exec"
"strconv"
"strings"
)
// Windows implementation: shell out to ffmpeg, same as Linux.
// Media Foundation via syscall is planned but ffmpeg is simpler for v1
// and commonly available via winget/chocolatey/scoop.
//
// TODO: implement Media Foundation via golang.org/x/sys/windows for
// zero-dependency video extraction on Windows.

// extract samples evenly spaced JPEG frames from the video at path and
// optionally transcodes the audio track to 16kHz mono WAV (best-effort).
func extract(path string, opts Options) (*Result, error) {
	ffmpeg, err := exec.LookPath("ffmpeg")
	if err != nil {
		return nil, fmt.Errorf("video support requires ffmpeg; install it with: winget install ffmpeg (or scoop install ffmpeg)")
	}
	duration, err := probeDuration(path)
	if err != nil {
		return nil, fmt.Errorf("failed to probe video: %w", err)
	}
	// Guard against zero/negative durations: the fps computation below
	// would otherwise divide by zero, producing an Inf/NaN value and an
	// invalid "fps=" filter argument for ffmpeg.
	if duration <= 0 {
		return nil, fmt.Errorf("invalid video duration %.3fs", duration)
	}
	// Never request more frames than whole seconds of video.
	frameCount := opts.MaxFrames
	if duration < float64(frameCount) {
		frameCount = int(duration)
	}
	if frameCount < 1 {
		frameCount = 1
	}
	// Evenly spaced frames across the whole clip.
	fps := float64(frameCount) / duration
	result := &Result{}
	args := []string{
		"-i", path,
		"-vf", fmt.Sprintf("fps=%.4f", fps),
		"-frames:v", strconv.Itoa(frameCount),
		"-f", "image2pipe",
		"-c:v", "mjpeg",
		"-q:v", "5",
		"pipe:1",
	}
	cmd := exec.Command(ffmpeg, args...)
	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg frame extraction failed: %s", stderr.String())
	}
	result.Frames = splitJPEGs(stdout.Bytes())
	// Audio failures are deliberately swallowed so an audio-less video
	// still yields frames.
	if opts.ExtractAudio {
		audio, err := extractAudio(ffmpeg, path)
		if err == nil && len(audio) > 44 { // WAV header is 44 bytes
			result.Audio = audio
		}
	}
	return result, nil
}
// probeDuration uses ffprobe (or, when unavailable, ffmpeg) to get the
// video duration in seconds.
func probeDuration(path string) (float64, error) {
	ffprobe, err := exec.LookPath("ffprobe")
	if err != nil {
		// Fall back to ffmpeg -i, which prints the duration to stderr.
		// Check the lookup result rather than executing an empty path
		// when ffmpeg is also missing.
		ffmpeg, err := exec.LookPath("ffmpeg")
		if err != nil {
			return 0, fmt.Errorf("video support requires ffmpeg; install it with: winget install ffmpeg (or scoop install ffmpeg)")
		}
		cmd := exec.Command(ffmpeg, "-i", path)
		var stderr bytes.Buffer
		cmd.Stderr = &stderr
		cmd.Run() // Ignore error — ffmpeg -i always exits non-zero
		return parseDurationFromFFmpeg(stderr.String())
	}
	cmd := exec.Command(ffprobe,
		"-v", "quiet",
		"-show_entries", "format=duration",
		"-of", "csv=p=0",
		path,
	)
	var stdout bytes.Buffer
	cmd.Stdout = &stdout
	if err := cmd.Run(); err != nil {
		return 0, err
	}
	return strconv.ParseFloat(strings.TrimSpace(stdout.String()), 64)
}
// parseDurationFromFFmpeg extracts the clip duration from `ffmpeg -i`
// stderr output ("Duration: HH:MM:SS.ss, ..."), returning an error for
// missing or malformed values (e.g. "Duration: N/A") instead of
// silently reporting 0.
func parseDurationFromFFmpeg(output string) (float64, error) {
	idx := strings.Index(output, "Duration: ")
	if idx < 0 {
		return 0, fmt.Errorf("could not find duration in ffmpeg output")
	}
	durStr := output[idx+10:]
	if commaIdx := strings.Index(durStr, ","); commaIdx > 0 {
		durStr = durStr[:commaIdx]
	}
	durStr = strings.TrimSpace(durStr)
	parts := strings.Split(durStr, ":")
	if len(parts) != 3 {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	hours, err := strconv.ParseFloat(parts[0], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	mins, err := strconv.ParseFloat(parts[1], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	secs, err := strconv.ParseFloat(parts[2], 64)
	if err != nil {
		return 0, fmt.Errorf("unexpected duration format: %s", durStr)
	}
	return hours*3600 + mins*60 + secs, nil
}
// extractAudio transcodes the audio track of path to 16kHz mono WAV,
// returned via ffmpeg's stdout pipe.
func extractAudio(ffmpeg, path string) ([]byte, error) {
	var stdout, stderr bytes.Buffer
	cmd := exec.Command(ffmpeg,
		"-i", path,
		"-ar", "16000", // resample to 16kHz
		"-ac", "1", // downmix to mono
		"-f", "wav",
		"pipe:1",
	)
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg audio extraction failed: %s", stderr.String())
	}
	return stdout.Bytes(), nil
}
// splitJPEGs splits concatenated JPEG data into individual images; each
// image runs from an SOI marker (FF D8) to the next EOI (FF D9).
func splitJPEGs(data []byte) [][]byte {
	var (
		frames [][]byte
		start  = -1
	)
	for i := 0; i+1 < len(data); i++ {
		switch {
		case data[i] == 0xFF && data[i+1] == 0xD8:
			start = i
		case data[i] == 0xFF && data[i+1] == 0xD9 && start >= 0:
			frames = append(frames, data[start:i+2])
			start = -1
		}
	}
	return frames
}