Skip to content

Commit

Permalink
all: imp code, docs, tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ainar-g committed Mar 15, 2024
1 parent 45849e6 commit 56756b2
Show file tree
Hide file tree
Showing 8 changed files with 115 additions and 74 deletions.
29 changes: 19 additions & 10 deletions internal/filtering/rulelist/engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ func NewEngine(c *EngineConfig) (e *Engine) {
}
}

// Close closes the underlying rule list engine as well as the rule lists.
// Close closes the underlying rule-list engine as well as the rule lists.
func (e *Engine) Close() (err error) {
e.mu.Lock()
defer e.mu.Unlock()
Expand All @@ -68,27 +68,36 @@ func (e *Engine) Close() (err error) {
return nil
}

return errors.Annotate(e.storage.Close(), "closing engine %q: %w", e.name)
err = e.storage.Close()
if err != nil {
return fmt.Errorf("closing engine %q: %w", e.name, err)
}

return nil
}

// FilterRequest returns the result of filtering req using the DNS filtering
// engine.
func (e *Engine) FilterRequest(
req *urlfilter.DNSRequest,
) (res *urlfilter.DNSResult, hasMatched bool) {
var engine *urlfilter.DNSEngine

func() {
e.mu.RLock()
defer e.mu.RUnlock()
return e.currentEngine().MatchRequest(req)
}

engine = e.engine
}()
// currentEngine returns the current filtering engine.  It is safe for
// concurrent use.
func (e *Engine) currentEngine() (engine *urlfilter.DNSEngine) {
	e.mu.RLock()
	defer e.mu.RUnlock()

	return e.engine
}

// Refresh updates all rule lists in e. ctx is used for cancellation.
// parseBuf, cli, cacheDir, and maxSize are used for updates of rule-list
// filters; see [Filter.Refresh].
//
// TODO(a.garipov): Unexport and test in an internal test or through engine
// tests.
func (e *Engine) Refresh(
ctx context.Context,
parseBuf []byte,
Expand Down
44 changes: 4 additions & 40 deletions internal/filtering/rulelist/engine_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,7 @@ package rulelist_test

import (
"context"
"io"
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
"testing"

"github.com/AdguardTeam/AdGuardHome/internal/filtering/rulelist"
Expand All @@ -21,41 +16,10 @@ import (
func TestEngine_Refresh(t *testing.T) {
cacheDir := t.TempDir()

initialFile := filepath.Join(cacheDir, "initial.txt")
initialData := []byte(testRuleTextBlocked)
err := os.WriteFile(initialFile, initialData, 0o644)
require.NoError(t, err)

srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
pt := testutil.PanicT{}

_, writeErr := io.WriteString(w, testRuleTextBlocked2)
require.NoError(pt, writeErr)
}))

srvURL, err := url.Parse(srv.URL)
require.NoError(t, err)
fileURL, srvURL := newFilterLocations(t, cacheDir, testRuleTextBlocked, testRuleTextBlocked2)

fileFlt, err := rulelist.NewFilter(&rulelist.FilterConfig{
URL: &url.URL{
Scheme: "file",
Path: filepath.Clean(initialFile),
},
Name: "File Filter",
UID: rulelist.MustNewUID(),
URLFilterID: rulelist.URLFilterID(1),
Enabled: true,
})
require.NoError(t, err)

httpFlt, err := rulelist.NewFilter(&rulelist.FilterConfig{
URL: srvURL,
Name: "HTTP Filter",
UID: rulelist.MustNewUID(),
URLFilterID: rulelist.URLFilterID(2),
Enabled: true,
})
require.NoError(t, err)
fileFlt := newFilter(t, fileURL, "File Filter")
httpFlt := newFilter(t, srvURL, "HTTP Filter")

eng := rulelist.NewEngine(&rulelist.EngineConfig{
Name: "Engine",
Expand All @@ -72,7 +36,7 @@ func TestEngine_Refresh(t *testing.T) {
Timeout: testTimeout,
}

err = eng.Refresh(ctx, buf, cli, cacheDir, rulelist.DefaultMaxRuleListSize)
err := eng.Refresh(ctx, buf, cli, cacheDir, rulelist.DefaultMaxRuleListSize)
require.NoError(t, err)

fltReq := &urlfilter.DNSRequest{
Expand Down
5 changes: 5 additions & 0 deletions internal/filtering/rulelist/filter.go
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,11 @@ func NewFilter(c *FilterConfig) (f *Filter, err error) {
// Refresh updates the data in the rule-list filter. parseBuf is the initial
// buffer used to parse information from the data. cli and maxSize are only
// used when f is a URL-based list.
//
// TODO(a.garipov): Unexport and test in an internal test or through engine
// tests.
//
// TODO(a.garipov): Consider not returning parseRes.
func (f *Filter) Refresh(
ctx context.Context,
parseBuf []byte,
Expand Down
23 changes: 3 additions & 20 deletions internal/filtering/rulelist/filter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,7 @@ package rulelist_test

import (
"context"
"io"
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
Expand All @@ -20,23 +18,8 @@ func TestFilter_Refresh(t *testing.T) {
cacheDir := t.TempDir()
uid := rulelist.MustNewUID()

initialFile := filepath.Join(cacheDir, "initial.txt")
initialData := []byte(
testRuleTextTitle +
testRuleTextBlocked,
)
writeErr := os.WriteFile(initialFile, initialData, 0o644)
require.NoError(t, writeErr)

srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
pt := testutil.PanicT{}

_, err := io.WriteString(w, testRuleTextTitle+testRuleTextBlocked)
require.NoError(pt, err)
}))

srvURL, urlErr := url.Parse(srv.URL)
require.NoError(t, urlErr)
const fltData = testRuleTextTitle + testRuleTextBlocked
fileURL, srvURL := newFilterLocations(t, cacheDir, fltData, fltData)

testCases := []struct {
url *url.URL
Expand All @@ -56,7 +39,7 @@ func TestFilter_Refresh(t *testing.T) {
name: "file",
url: &url.URL{
Scheme: "file",
Path: initialFile,
Path: fileURL.Path,
},
wantNewErrMsg: "",
}, {
Expand Down
5 changes: 5 additions & 0 deletions internal/filtering/rulelist/rulelist.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,18 @@ const DefaultMaxRuleListSize = 64 * datasize.MB

// URLFilterID is a semantic type-alias for IDs used for working with package
// urlfilter.
//
// TODO(a.garipov): Use everywhere in package filtering.
type URLFilterID = int

// The IDs of built-in filter lists.
//
// NOTE: Do not change without the need for it and keep in sync with
// client/src/helpers/constants.js.
//
// TODO(a.garipov): Add type [URLFilterID] once it is used consistently in
// package filtering.
//
// TODO(d.kolyshev): Add URLFilterIDLegacyRewrite here and to the UI.
const (
URLFilterIDCustom = 0
Expand Down
75 changes: 75 additions & 0 deletions internal/filtering/rulelist/rulelist_test.go
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
package rulelist_test

import (
"io"
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
"sync/atomic"
"testing"
"time"

"github.com/AdguardTeam/AdGuardHome/internal/filtering/rulelist"
"github.com/AdguardTeam/golibs/testutil"
"github.com/stretchr/testify/require"
)

func TestMain(m *testing.M) {
Expand Down Expand Up @@ -35,3 +43,70 @@ const (
// See https://github.com/AdguardTeam/AdGuardHome/issues/6003.
testRuleTextCosmetic = "||cosmetic.example## :has-text(/\u200c/i)\n"
)

// urlFilterIDCounter is the atomic counter from which test filter IDs are
// drawn.
var urlFilterIDCounter = &atomic.Int32{}

// newURLFilterID returns the next unique URLFilterID for use in tests.
func newURLFilterID() (id rulelist.URLFilterID) {
	id = rulelist.URLFilterID(urlFilterIDCounter.Add(1))

	return id
}

// newFilter is a test helper that constructs an enabled rule-list filter with
// a fresh UID and URL-filter ID.  It does not register the closing of the
// filter using t.Cleanup; callers must do that either directly or by using
// the filter in an engine.
func newFilter(t testing.TB, u *url.URL, name string) (f *rulelist.Filter) {
	t.Helper()

	conf := &rulelist.FilterConfig{
		URL:         u,
		Name:        name,
		UID:         rulelist.MustNewUID(),
		URLFilterID: newURLFilterID(),
		Enabled:     true,
	}

	flt, err := rulelist.NewFilter(conf)
	require.NoError(t, err)

	return flt
}

// newFilterLocations is a test helper that prepares the two places a filter
// can be loaded from: a rule-list file inside cacheDir containing fileData,
// and an HTTP server serving httpData.  File removal and server shutdown are
// registered with t.Cleanup.
func newFilterLocations(
	t testing.TB,
	cacheDir string,
	fileData string,
	httpData string,
) (fileURL, srvURL *url.URL) {
	fltPath := filepath.Join(cacheDir, "initial.txt")
	err := os.WriteFile(fltPath, []byte(fileData), 0o644)
	require.NoError(t, err)

	testutil.CleanupAndRequireSuccess(t, func() (err error) {
		return os.Remove(fltPath)
	})

	srv := newStringHTTPServer(httpData)
	t.Cleanup(srv.Close)

	srvURL, err = url.Parse(srv.URL)
	require.NoError(t, err)

	fileURL = &url.URL{
		Scheme: "file",
		Path:   fltPath,
	}

	return fileURL, srvURL
}

// newStringHTTPServer returns a new HTTP server whose handler responds to
// every request with s.
func newStringHTTPServer(s string) (srv *httptest.Server) {
	h := func(w http.ResponseWriter, _ *http.Request) {
		pt := testutil.PanicT{}

		_, err := io.WriteString(w, s)
		require.NoError(pt, err)
	}

	return httptest.NewServer(http.HandlerFunc(h))
}
6 changes: 3 additions & 3 deletions internal/filtering/rulelist/textengine.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@ type TextEngineConfig struct {
// Rules is the text of the filtering rules for this engine.
Rules []string

// URLFilterID is the ID to use inside an engine.
URLFilterID URLFilterID
// ID is the ID to use inside a URL-filter engine.
ID URLFilterID
}

// NewTextEngine returns a new rule-list filtering engine that uses rules
Expand All @@ -45,7 +45,7 @@ func NewTextEngine(c *TextEngineConfig) (e *TextEngine, err error) {
storage, err := filterlist.NewRuleStorage([]filterlist.RuleList{
&filterlist.StringRuleList{
RulesText: text,
ID: c.URLFilterID,
ID: c.ID,
IgnoreCosmetic: true,
},
})
Expand Down
2 changes: 1 addition & 1 deletion internal/filtering/rulelist/textengine_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ func TestNewTextEngine(t *testing.T) {
testRuleTextTitle,
testRuleTextBlocked,
},
URLFilterID: testURLFilterID,
ID: testURLFilterID,
})
require.NoError(t, err)
require.NotNil(t, eng)
Expand Down

0 comments on commit 56756b2

Please sign in to comment.