Add header order configurability #46

Open · wants to merge 39 commits into main
Changes from all commits (39 commits)
4e991f8
feat: add textproto fork
sleeyax Jun 2, 2023
9d9ff5d
feat: add header order
sleeyax Jun 3, 2023
5878d39
feat: add server header order
sleeyax Jun 3, 2023
6ac0447
feat: enable custom initial HTTP2 SETTINGS frame
sleeyax Jun 3, 2023
8a7eb6f
Merge remote-tracking branch 'sleeyax/main'
BRUHItsABunny Sep 18, 2023
04b2891
fix: incompatibilities
BRUHItsABunny Sep 18, 2023
411d589
fix: http 1.1 and capitalization inconsistency
BRUHItsABunny Sep 22, 2023
b754cb3
feat: all headers should be order
BRUHItsABunny Sep 22, 2023
1d02990
feat: add http2 framing settings, priority header params and priority…
BRUHItsABunny Jan 11, 2024
11c5219
feat: add http2 framing settings, priority header params and priority…
BRUHItsABunny Jan 11, 2024
dce64bb
fix: compression error caused by header table size
BRUHItsABunny Jan 11, 2024
0d5c9f6
cleanup
BRUHItsABunny Jan 11, 2024
3c547a9
Merge remote-tracking branch 'ooni/main'
BRUHItsABunny Jan 12, 2024
55808fb
Merge remote-tracking branch 'ooni/main'
BRUHItsABunny Apr 20, 2024
c44602d
Merge remote-tracking branch 'ooni/main'
BRUHItsABunny May 26, 2024
8f8c5d0
Merge remote-tracking branch 'ooni/main'
BRUHItsABunny Oct 23, 2024
9852578
feat: add ability to use browser encodings
BRUHItsABunny Nov 3, 2024
4b34daf
feat: add ability to use browser encodings
BRUHItsABunny Nov 3, 2024
a3cdb82
fix: default encoding to gzip
BRUHItsABunny Nov 4, 2024
f2e7ae5
feat: add post handshake callback for certificate pinning
BRUHItsABunny Nov 4, 2024
acfb16b
feat: add post handshake callback for certificate pinning
BRUHItsABunny Nov 7, 2024
a9672b1
feat: add post handshake callback for certificate pinning
BRUHItsABunny Nov 7, 2024
469fd7d
feat: compressor streamable and lazy-init, hot path
ii64 Dec 6, 2024
9fa006d
chore: use encoding name type
ii64 Dec 6, 2024
bb81738
chore: h2 bundle decompressor reader space
ii64 Dec 6, 2024
cb35a8c
chore: rename factories to registry
ii64 Dec 6, 2024
78c2cbf
Merge pull request #2 from BRUHItsABunny/feat/compressor-streamable
ii64 Dec 6, 2024
e3308c1
fix: write request extra header to wire
ii64 Dec 6, 2024
07e5014
Merge pull request #3 from BRUHItsABunny/fix/extra-header-write-wire
ii64 Dec 6, 2024
726f96d
fix: accept-encoding fallback gzip
ii64 Dec 6, 2024
2d1d7bc
Merge pull request #4 from BRUHItsABunny/fix/compression-default
ii64 Dec 6, 2024
3d4b7cb
fix: extra header orderable
ii64 Dec 6, 2024
c49ef06
Merge pull request #5 from BRUHItsABunny/fix/extra-header-orderable
ii64 Dec 6, 2024
48b2637
fix: h2 bundle decompress encoding order
ii64 Dec 9, 2024
8b48780
Merge pull request #6 from BRUHItsABunny/fix/h2-decompress
ii64 Dec 9, 2024
7469984
Merge remote-tracking branch 'ooni/main' into feat/merge-upstream
BRUHItsABunny Dec 27, 2024
c43b283
Merge pull request #7 from BRUHItsABunny/feat/merge-upstream
BRUHItsABunny Dec 30, 2024
1cccadf
fix: revert to the old way of checking for user agent presence
BRUHItsABunny Jan 3, 2025
996109d
Merge pull request #8 from BRUHItsABunny/fix/user-agent-issues-http1
BRUHItsABunny Jan 3, 2025
288 changes: 288 additions & 0 deletions compression.go
@@ -0,0 +1,288 @@
package http

import (
	"bytes"
	"compress/flate"
	"compress/gzip"
	"compress/lzw"
	"compress/zlib"
	"errors"
	"fmt"
	"io"
	"strings"
	"sync"

	"github.com/andybalholm/brotli"
	"github.com/klauspost/compress/zstd"
)

type CompressionFactory func(writer io.Writer) (io.Writer, error)
type DecompressionFactory func(reader io.Reader) (io.Reader, error)

type EncodingName = string

type CompressionRegistry map[EncodingName]CompressionFactory
type DecompressionRegistry map[EncodingName]DecompressionFactory

var (
	DefaultCompressionRegistry = CompressionRegistry{
		"":         func(writer io.Writer) (io.Writer, error) { return writer, nil },
		"identity": func(writer io.Writer) (io.Writer, error) { return writer, nil },
		"gzip":     func(writer io.Writer) (io.Writer, error) { return gzip.NewWriter(writer), nil },
		"zlib":     func(writer io.Writer) (io.Writer, error) { return zlib.NewWriter(writer), nil },
		"br":       func(writer io.Writer) (io.Writer, error) { return brotli.NewWriter(writer), nil },
		"deflate":  func(writer io.Writer) (io.Writer, error) { return flate.NewWriter(writer, -1) },
		// TODO: Confirm compress
		"compress": func(writer io.Writer) (io.Writer, error) { return lzw.NewWriter(writer, lzw.LSB, 8), nil },
		"zstd":     func(writer io.Writer) (io.Writer, error) { return zstd.NewWriter(writer) },
	}

	DefaultDecompressionRegistry = DecompressionRegistry{
		"":         func(reader io.Reader) (io.Reader, error) { return reader, nil },
		"identity": func(reader io.Reader) (io.Reader, error) { return reader, nil },
		"gzip":     func(reader io.Reader) (io.Reader, error) { return gzip.NewReader(reader) },
		"zlib":     func(reader io.Reader) (io.Reader, error) { return zlib.NewReader(reader) },
		"br":       func(reader io.Reader) (io.Reader, error) { return brotli.NewReader(reader), nil },
		"deflate":  func(reader io.Reader) (io.Reader, error) { return flate.NewReader(reader), nil },
		"compress": func(reader io.Reader) (io.Reader, error) { return lzw.NewReader(reader, lzw.LSB, 8), nil },
		"zstd":     func(reader io.Reader) (io.Reader, error) { return zstd.NewReader(reader) },
	}
)

func compress(data []byte, registry CompressionRegistry, order ...EncodingName) ([]byte, error) {
	var (
		err     error
		writers []io.Writer
		writer  io.Writer
	)

	if registry == nil {
		registry = DefaultCompressionRegistry
	}

	dst := bytes.NewBuffer(nil)
	writer = dst

	for idx, compression := range order {
		mapping, ok := registry[compression]
		if !ok {
			return nil, errors.New(compression + " is not supported")
		}
		writer, err = mapping(writer)
		if err != nil {
			return nil, fmt.Errorf("mapping[%d:%s]: %w", idx, compression, err)
		}

		writers = append(writers, writer)
	}

	// writer is the innermost wrapper, or dst itself when order is empty.
	_, err = writer.Write(data)
	if err != nil {
		return nil, fmt.Errorf("writer.Write: %w", err)
	}

	// Close all writers in reverse order to ensure all data is flushed.
	// Not every factory returns an io.Closer (e.g. "identity"), so check first.
	for i := len(writers) - 1; i >= 0; i-- {
		if closer, ok := writers[i].(io.Closer); ok {
			err = closer.Close()
			if err != nil {
				return nil, fmt.Errorf("writers[%d].Close: %w", i, err)
			}
		}
	}

	return dst.Bytes(), nil
}

// CompressorWriter wraps an io.Writer with a chain of compressors, applied in Order.
type CompressorWriter struct {
	io.Writer

	Registry CompressionRegistry
	Order    []EncodingName

	wrs []io.Writer

	once sync.Once
}

var _ io.WriteCloser = (*CompressorWriter)(nil)

func (cw *CompressorWriter) init() error {
	if cw.Registry == nil {
		cw.Registry = DefaultCompressionRegistry
	}
	cw.wrs = nil
	for i := 0; i < len(cw.Order); i++ {
		directive := cw.Order[i]
		directive = strings.Trim(directive, " ")
		if directive == "" {
			continue
		}
		compressorWrapper, exist := cw.Registry[directive]
		if !exist {
			return fmt.Errorf("%s is not supported", directive)
		}
		writer, err := compressorWrapper(cw.Writer)
		if err != nil {
			return fmt.Errorf("compressor wrapper init: %s: %w", directive, err)
		}
		cw.wrs = append(cw.wrs, writer)
		cw.Writer = writer
	}
	return nil
}

// Init initializes the compressor chain early instead of lazily on the first write.
func (cw *CompressorWriter) Init() (err error) {
	cw.once.Do(func() {
		err = cw.init()
	})
	return
}

// Write writes b through the compressor chain.
func (cw *CompressorWriter) Write(b []byte) (nb int, err error) {
	cw.once.Do(func() {
		err = cw.init()
	})
	if err != nil {
		return
	}
	nb, err = cw.Writer.Write(b)
	return
}

// Close closes the wrapped compressors in reverse order so buffered data is flushed.
func (cw *CompressorWriter) Close() error {
	for i := len(cw.wrs) - 1; i >= 0; i-- {
		if closer, ok := cw.wrs[i].(io.Closer); ok {
			err := closer.Close()
			if err != nil {
				return err
			}
		}
	}
	return nil
}

func decompress(data []byte, registry DecompressionRegistry, order ...EncodingName) ([]byte, error) {
	var (
		err     error
		reader  io.Reader
		readers []io.Reader
	)

	if registry == nil {
		registry = DefaultDecompressionRegistry
	}

	src := bytes.NewBuffer(data)
	reader = src

	readers = append(readers, src)

	// Each encoding wraps the previous reader, so order[0] decodes the raw
	// bytes first and must therefore name the outermost encoding of data.
	for idx := 0; idx < len(order); idx++ {
		compression := order[idx]
		mapping, ok := registry[compression]
		if !ok {
			return nil, errors.New(compression + " is not supported")
		}
		reader, err = mapping(reader)
		if err != nil {
			return nil, fmt.Errorf("mapping[%d:%s]: %w", idx, compression, err)
		}

		readers = append(readers, reader)
	}

	dataOut, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("io.ReadAll: %w", err)
	}

	// Close any readers that are also closers before returning.
	for _, readerObj := range readers {
		if closer, ok := readerObj.(io.Closer); ok {
			defer closer.Close()
		}
	}

	return dataOut, nil
}

// DecompressorReader wraps an io.Reader with a chain of decompressors, applied in Order.
type DecompressorReader struct {
	io.Reader

	Registry DecompressionRegistry
	Order    []EncodingName

	rds []io.Reader

	once sync.Once
}

var _ io.ReadCloser = (*DecompressorReader)(nil)

func (dr *DecompressorReader) init() error {
	if dr.Registry == nil {
		dr.Registry = DefaultDecompressionRegistry
	}
	dr.rds = nil
	for i := 0; i < len(dr.Order); i++ {
		directive := dr.Order[i]
		directive = strings.Trim(directive, " ")
		if directive == "" {
			continue
		}
		decompressorWrapper, exist := dr.Registry[directive]
		if !exist {
			return fmt.Errorf("%s is not supported", directive)
		}
		reader, err := decompressorWrapper(dr.Reader)
		if err != nil {
			return fmt.Errorf("decompressor wrapper init: %s: %w", directive, err)
		}
		dr.rds = append(dr.rds, reader)
		dr.Reader = reader
	}
	return nil
}

// Init initializes the decompressor chain early instead of lazily on the first read.
func (dr *DecompressorReader) Init() (err error) {
	dr.once.Do(func() {
		err = dr.init()
	})
	return
}

// Read reads decompressed bytes into b.
func (dr *DecompressorReader) Read(b []byte) (nb int, err error) {
	dr.once.Do(func() {
		err = dr.init()
	})
	if err != nil {
		return
	}
	nb, err = dr.Reader.Read(b)
	return
}

// Close closes the wrapped decompressors in reverse order.
func (dr *DecompressorReader) Close() error {
	for i := len(dr.rds) - 1; i >= 0; i-- {
		if closer, ok := dr.rds[i].(io.Closer); ok {
			err := closer.Close()
			if err != nil {
				return err
			}
		}
	}
	return nil
}
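
A minimal round-trip sketch of the exported streaming types added in this file. The test file name, payload, and encoding choices below are illustrative only and not part of this PR; the snippet uses only identifiers introduced by this diff. Note that the same Order works for both sides, because each registry factory wraps the previously built writer or reader, so Order[0] ends up as the outermost encoding on the wire.

// compression_example_test.go (illustrative sketch, not included in this PR)
package http

import (
	"bytes"
	"io"
	"testing"
)

func TestCompressionRoundTrip(t *testing.T) {
	payload := []byte("hello, header-order world")

	// Compress: with Order{"gzip", "br"}, gzip is the outermost encoding of buf.
	var buf bytes.Buffer
	cw := &CompressorWriter{Writer: &buf, Order: []EncodingName{"gzip", "br"}}
	if _, err := cw.Write(payload); err != nil {
		t.Fatal(err)
	}
	// Close flushes the wrapped compressors in reverse order.
	if err := cw.Close(); err != nil {
		t.Fatal(err)
	}

	// Decompress with the same Order: Order[0] decodes the raw (outermost) bytes first.
	dr := &DecompressorReader{Reader: &buf, Order: []EncodingName{"gzip", "br"}}
	out, err := io.ReadAll(dr)
	if err != nil {
		t.Fatal(err)
	}
	if err := dr.Close(); err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(out, payload) {
		t.Fatalf("round trip mismatch: got %q", out)
	}
}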