|
| 1 | +package extsort_test |
| 2 | + |
| 3 | +import ( |
| 4 | + "context" |
| 5 | + "encoding/json" |
| 6 | + "errors" |
| 7 | + "testing" |
| 8 | + |
| 9 | + "github.com/lanrat/extsort" |
| 10 | +) |
| 11 | + |
| 12 | +// TestSerializationErrorInFirstChunk tests error handling when the first chunk fails serialization |
| 13 | +func TestSerializationErrorInFirstChunk(t *testing.T) { |
| 14 | + inputChan := make(chan *ErrorEdgeItem, 1) |
| 15 | + |
| 16 | + // Only item fails serialization |
| 17 | + inputChan <- &ErrorEdgeItem{Key: 1, FailSerialize: true} |
| 18 | + close(inputChan) |
| 19 | + |
| 20 | + config := &extsort.Config{ChunkSize: 1} |
| 21 | + sort, outChan, errChan := extsort.Generic( |
| 22 | + inputChan, |
| 23 | + edgeFromBytes, |
| 24 | + edgeToBytes, |
| 25 | + edgeCompare, |
| 26 | + config, |
| 27 | + ) |
| 28 | + |
| 29 | + sort.Sort(context.Background()) |
| 30 | + |
| 31 | + // Should get no output |
| 32 | + outputCount := 0 |
| 33 | + for range outChan { |
| 34 | + outputCount++ |
| 35 | + } |
| 36 | + |
| 37 | + if outputCount != 0 { |
| 38 | + t.Errorf("Expected 0 output items, got %d", outputCount) |
| 39 | + } |
| 40 | + |
| 41 | + // Should get error |
| 42 | + err := <-errChan |
| 43 | + if err == nil { |
| 44 | + t.Fatal("Expected serialization error but got nil") |
| 45 | + } |
| 46 | + |
| 47 | + var serErr *extsort.SerializationError |
| 48 | + if !errors.As(err, &serErr) { |
| 49 | + t.Errorf("Expected SerializationError, got %T", err) |
| 50 | + } |
| 51 | + |
| 52 | + t.Logf("First chunk serialization error handled correctly: %v", err) |
| 53 | +} |
| 54 | + |
| 55 | +// TestSerializationErrorInLastChunk tests error handling when the last chunk fails serialization |
| 56 | +func TestSerializationErrorInLastChunk(t *testing.T) { |
| 57 | + inputChan := make(chan *ErrorEdgeItem, 3) |
| 58 | + |
| 59 | + // Good items followed by failing item |
| 60 | + inputChan <- &ErrorEdgeItem{Key: 1, FailSerialize: false} |
| 61 | + inputChan <- &ErrorEdgeItem{Key: 2, FailSerialize: false} |
| 62 | + inputChan <- &ErrorEdgeItem{Key: 3, FailSerialize: true} // Last chunk fails |
| 63 | + close(inputChan) |
| 64 | + |
| 65 | + config := &extsort.Config{ChunkSize: 1} // Each item in its own chunk |
| 66 | + sort, outChan, errChan := extsort.Generic( |
| 67 | + inputChan, |
| 68 | + edgeFromBytes, |
| 69 | + edgeToBytes, |
| 70 | + edgeCompare, |
| 71 | + config, |
| 72 | + ) |
| 73 | + |
| 74 | + sort.Sort(context.Background()) |
| 75 | + |
| 76 | + // Should get no output due to error |
| 77 | + outputCount := 0 |
| 78 | + for range outChan { |
| 79 | + outputCount++ |
| 80 | + } |
| 81 | + |
| 82 | + // Should get error even though first chunks were fine |
| 83 | + err := <-errChan |
| 84 | + if err == nil { |
| 85 | + t.Fatal("Expected serialization error but got nil") |
| 86 | + } |
| 87 | + |
| 88 | + t.Logf("Last chunk serialization error handled correctly: %v", err) |
| 89 | +} |
| 90 | + |
| 91 | +// TestEmptyChunkWithError tests error handling when chunks become empty due to errors |
| 92 | +func TestEmptyChunkWithError(t *testing.T) { |
| 93 | + inputChan := make(chan *ErrorEdgeItem, 1) |
| 94 | + |
| 95 | + inputChan <- &ErrorEdgeItem{Key: 1, FailSerialize: true} |
| 96 | + close(inputChan) |
| 97 | + |
| 98 | + // Use larger chunk size - the single failing item should still cause error |
| 99 | + config := &extsort.Config{ChunkSize: 10} |
| 100 | + sort, outChan, errChan := extsort.Generic( |
| 101 | + inputChan, |
| 102 | + edgeFromBytes, |
| 103 | + edgeToBytes, |
| 104 | + edgeCompare, |
| 105 | + config, |
| 106 | + ) |
| 107 | + |
| 108 | + sort.Sort(context.Background()) |
| 109 | + |
| 110 | + // Drain output |
| 111 | + for range outChan { |
| 112 | + // Should be empty |
| 113 | + } |
| 114 | + |
| 115 | + // Should still get error |
| 116 | + err := <-errChan |
| 117 | + if err == nil { |
| 118 | + t.Fatal("Expected error even with large chunk size") |
| 119 | + } |
| 120 | + |
| 121 | + t.Logf("Empty chunk error handling works: %v", err) |
| 122 | +} |
| 123 | + |
| 124 | +// TestDeserializationErrorDuringMerge tests deserialization errors during the merge phase |
| 125 | +func TestDeserializationErrorDuringMerge(t *testing.T) { |
| 126 | + inputChan := make(chan *ErrorEdgeItem, 2) |
| 127 | + |
| 128 | + // Items that serialize fine |
| 129 | + inputChan <- &ErrorEdgeItem{Key: 1, FailSerialize: false} |
| 130 | + inputChan <- &ErrorEdgeItem{Key: 2, FailSerialize: false} |
| 131 | + close(inputChan) |
| 132 | + |
| 133 | + config := &extsort.Config{ChunkSize: 1} // Force multiple chunks to trigger merge |
| 134 | + sort, outChan, errChan := extsort.Generic( |
| 135 | + inputChan, |
| 136 | + edgeFailingFromBytes, // This will fail during merge |
| 137 | + edgeToBytes, |
| 138 | + edgeCompare, |
| 139 | + config, |
| 140 | + ) |
| 141 | + |
| 142 | + sort.Sort(context.Background()) |
| 143 | + |
| 144 | + // Should get no output due to deserialization failure during merge |
| 145 | + outputCount := 0 |
| 146 | + for range outChan { |
| 147 | + outputCount++ |
| 148 | + } |
| 149 | + |
| 150 | + err := <-errChan |
| 151 | + if err == nil { |
| 152 | + t.Fatal("Expected deserialization error during merge") |
| 153 | + } |
| 154 | + |
| 155 | + var deserErr *extsort.DeserializationError |
| 156 | + if !errors.As(err, &deserErr) { |
| 157 | + t.Errorf("Expected DeserializationError, got %T", err) |
| 158 | + } |
| 159 | + |
| 160 | + t.Logf("Merge phase deserialization error handled: %v", err) |
| 161 | +} |
| 162 | + |
| 163 | +// TestConcurrentErrorHandling tests error handling with multiple workers |
| 164 | +func TestConcurrentErrorHandling(t *testing.T) { |
| 165 | + inputChan := make(chan *ErrorEdgeItem, 4) |
| 166 | + |
| 167 | + // Simpler test - just a few items with one that fails early |
| 168 | + inputChan <- &ErrorEdgeItem{Key: 1, FailSerialize: false} |
| 169 | + inputChan <- &ErrorEdgeItem{Key: 2, FailSerialize: true} // This will fail |
| 170 | + inputChan <- &ErrorEdgeItem{Key: 3, FailSerialize: false} |
| 171 | + close(inputChan) |
| 172 | + |
| 173 | + config := &extsort.Config{ |
| 174 | + ChunkSize: 1, // Each item in its own chunk for predictable behavior |
| 175 | + NumWorkers: 2, // Multiple workers |
| 176 | + } |
| 177 | + sort, outChan, errChan := extsort.Generic( |
| 178 | + inputChan, |
| 179 | + edgeFromBytes, |
| 180 | + edgeToBytes, |
| 181 | + edgeCompare, |
| 182 | + config, |
| 183 | + ) |
| 184 | + |
| 185 | + sort.Sort(context.Background()) |
| 186 | + |
| 187 | + // Drain output |
| 188 | + outputCount := 0 |
| 189 | + for range outChan { |
| 190 | + outputCount++ |
| 191 | + } |
| 192 | + |
| 193 | + // Should get error from one of the workers |
| 194 | + err := <-errChan |
| 195 | + if err == nil { |
| 196 | + t.Fatal("Expected error with concurrent processing") |
| 197 | + } |
| 198 | + |
| 199 | + t.Logf("Concurrent error handling works: %v (got %d outputs)", err, outputCount) |
| 200 | +} |
| 201 | + |
| 202 | +// Helper types for edge case testing |
| 203 | + |
// ErrorEdgeItem is a test fixture whose serialization can be made to fail on
// demand, used to exercise extsort's error paths.
type ErrorEdgeItem struct {
	Key           int  `json:"key"`           // sort key compared by edgeCompare
	FailSerialize bool `json:"failSerialize"` // when true, edgeToBytes returns an error for this item
}
| 208 | + |
| 209 | +func edgeToBytes(item *ErrorEdgeItem) ([]byte, error) { |
| 210 | + if item.FailSerialize { |
| 211 | + return nil, errors.New("edge case serialization failure") |
| 212 | + } |
| 213 | + return json.Marshal(item) |
| 214 | +} |
| 215 | + |
| 216 | +func edgeFromBytes(data []byte) (*ErrorEdgeItem, error) { |
| 217 | + var item ErrorEdgeItem |
| 218 | + err := json.Unmarshal(data, &item) |
| 219 | + return &item, err |
| 220 | +} |
| 221 | + |
| 222 | +func edgeFailingFromBytes(data []byte) (*ErrorEdgeItem, error) { |
| 223 | + return nil, errors.New("edge case deserialization failure") |
| 224 | +} |
| 225 | + |
| 226 | +func edgeCompare(a, b *ErrorEdgeItem) int { |
| 227 | + if a.Key < b.Key { |
| 228 | + return -1 |
| 229 | + } else if a.Key > b.Key { |
| 230 | + return 1 |
| 231 | + } |
| 232 | + return 0 |
| 233 | +} |
0 commit comments