Browse Source

rename query package to qs

Aneurin Barker Snook 1 year ago
commit
137dc89f64
9 changed files with 762 additions and 0 deletions
  1. 155 0
      filter.go
  2. 60 0
      filter_test.go
  3. 3 0
      go.mod
  4. 73 0
      page.go
  5. 107 0
      page_test.go
  6. 123 0
      pagination.go
  7. 50 0
      pagination_test.go
  8. 131 0
      sort.go
  9. 60 0
      sort_test.go

+ 155 - 0
filter.go

@@ -0,0 +1,155 @@
+package qs
+
+import (
+	"errors"
+	"net/http"
+	"net/url"
+	"regexp"
+	"strconv"
+)
+
// Query errors returned when filter parsing fails.
var (
	// ErrInvalidFilter is returned when a filter entry does not match the
	// "<field> <operator> <value>" format.
	ErrInvalidFilter = errors.New("invalid filter")
	// ErrTooManyFilters is returned when more filter entries are present than
	// ReadFiltersOptions.MaxFilters allows.
	ErrTooManyFilters = errors.New("too many filters")
)

// filterRegexp matches strings of the form "<field> <operator> <value>".
// [A-Za-z0-9] fixes the original [A-z] range, which also matched the
// punctuation characters between 'Z' and 'a' ([ \ ] ^ _ `).
var filterRegexp = regexp.MustCompile(`^([A-Za-z0-9]+) (eq|neq|gt|gte|lt|lte|in|not in|like|not like) (.+)$`)
+
// Filter represents a filter as used in, most likely, a database query.
type Filter struct {
	Field    string `json:"field"`    // Field to filter on.
	Operator string `json:"operator"` // Filter operator, e.g. eq, gt...
	Value    string `json:"value"`    // Value to filter by.
}

// BoolValue retrieves the filter value as a bool.
func (f Filter) BoolValue() (bool, error) {
	return strconv.ParseBool(f.Value)
}

// Float32Value retrieves the filter value as a float32.
func (f Filter) Float32Value() (float32, error) {
	v, err := strconv.ParseFloat(f.Value, 32)
	return float32(v), err
}

// Float64Value retrieves the filter value as a float64.
func (f Filter) Float64Value() (float64, error) {
	return strconv.ParseFloat(f.Value, 64)
}

// IntValue retrieves the filter value as an int.
func (f Filter) IntValue() (int, error) {
	return strconv.Atoi(f.Value)
}

// Filters is a slice of Filter structs.
type Filters []Filter

// Field returns a new Filters slice containing only filters for the specified field.
// The original order of filters is preserved.
func (ff Filters) Field(field string) Filters {
	matched := Filters{}
	for i := range ff {
		if ff[i].Field == field {
			matched = append(matched, ff[i])
		}
	}
	return matched
}

// Fields returns a new Filters slice containing filters for any of the specified fields.
// The original order of filters is preserved.
func (ff Filters) Fields(fields ...string) Filters {
	matched := Filters{}
	for i := range ff {
		for _, field := range fields {
			if ff[i].Field == field {
				matched = append(matched, ff[i])
			}
		}
	}
	return matched
}

// HasField returns true if the Filters slice includes any filters for the specified field.
func (ff Filters) HasField(field string) bool {
	for i := range ff {
		if ff[i].Field == field {
			return true
		}
	}
	return false
}
+
// ReadFiltersOptions configures the behaviour of ReadFilters.
// A nil *ReadFiltersOptions is valid and yields the defaults below.
type ReadFiltersOptions struct {
	Key        string // Query string key. The default value is "filter".
	MaxFilters int    // If this is > 0, a maximum number of filters is imposed.
}
+
+// ReadFilters parses URL values into a slice of filters.
+// This function returns nil if no filters are found.
+func ReadFilters(values url.Values, opt *ReadFiltersOptions) (Filters, error) {
+	opt = initFiltersOptions(opt)
+
+	if !values.Has(opt.Key) {
+		return nil, nil
+	}
+
+	if opt.MaxFilters > 0 && len(values[opt.Key]) > opt.MaxFilters {
+		return nil, ErrTooManyFilters
+	}
+
+	filters := Filters{}
+	for _, filterStr := range values[opt.Key] {
+		match := filterRegexp.FindStringSubmatch(filterStr)
+		if match == nil {
+			return nil, ErrInvalidFilter
+		}
+
+		filter := Filter{
+			Field:    match[1],
+			Operator: match[2],
+			Value:    match[3],
+		}
+		filters = append(filters, filter)
+	}
+
+	return filters, nil
+}
+
+// ReadRequestFilters parses a request's query string into a slice of filters.
+// This function returns nil if no filters are found.
+func ReadRequestFilters(req *http.Request, opt *ReadFiltersOptions) (Filters, error) {
+	return ReadFilters(req.URL.Query(), opt)
+}
+
+// ReadStringFilters parses a query string literal into a slice of filters.
+// This function returns nil if no filters are found.
+func ReadStringFilters(qs string, opt *ReadFiltersOptions) (Filters, error) {
+	values, err := url.ParseQuery(qs)
+	if err != nil {
+		return nil, err
+	}
+	return ReadFilters(values, opt)
+}
+
+func initFiltersOptions(opt *ReadFiltersOptions) *ReadFiltersOptions {
+	def := &ReadFiltersOptions{
+		Key: "filter",
+	}
+
+	if opt != nil {
+		if len(opt.Key) > 0 {
+			def.Key = opt.Key
+		}
+
+		if opt.MaxFilters > def.MaxFilters {
+			def.MaxFilters = opt.MaxFilters
+		}
+	}
+
+	return def
+}

+ 60 - 0
filter_test.go

@@ -0,0 +1,60 @@
+package qs
+
+import "testing"
+
+func TestReadFilters(t *testing.T) {
+	type TestCase struct {
+		Input  string
+		Opt    *ReadFiltersOptions
+		Output []Filter
+		Err    error
+	}
+
+	testCases := []TestCase{
+		{Input: ""},
+		{
+			Input: "filter=title eq Spaghetti",
+			Output: []Filter{
+				{Field: "title", Operator: "eq", Value: "Spaghetti"},
+			},
+		},
+		{
+			Input: "filter=title eq Bolognese&filter=serves gte 4",
+			Output: []Filter{
+				{Field: "title", Operator: "eq", Value: "Bolognese"},
+				{Field: "serves", Operator: "gte", Value: "4"},
+			},
+		},
+	}
+
+	for n, tc := range testCases {
+		t.Logf("(%d) Testing %q with options %+v", n, tc.Input, tc.Opt)
+
+		filters, err := ReadStringFilters(tc.Input, nil)
+
+		if err != tc.Err {
+			t.Errorf("Expected error %v, got %v", tc.Err, err)
+		}
+		if tc.Err != nil {
+			continue
+		}
+
+		if tc.Output == nil && filters != nil {
+			t.Error("Expected nil")
+			continue
+		}
+
+		if len(filters) != len(tc.Output) {
+			t.Errorf("Expected %d filters, got %d", len(tc.Output), len(filters))
+		}
+
+		for i, filter := range tc.Output {
+			if i == len(filters) {
+				break
+			}
+			if filter != filters[i] {
+				t.Errorf("Expected %+v for filter %d, got %+v", filter, i, filters[i])
+			}
+		}
+	}
+}

+ 3 - 0
go.mod

@@ -0,0 +1,3 @@
+module github.com/recipeer/go/query
+
+go 1.21.1

+ 73 - 0
page.go

@@ -0,0 +1,73 @@
+package qs
+
+import (
+	"net/http"
+	"net/url"
+)
+
// Page represents a combination of pagination, filter and sort parameters for, most likely, a database query.
type Page struct {
	Pagination *Pagination `json:"pagination"` // Always set by ReadPage when it returns without error.
	Filters    Filters     `json:"filters,omitempty"` // Nil when the query string contains no filters.
	Sorts      Sorts       `json:"sorts,omitempty"` // Nil when the query string contains no sorts.
}
+
// ReadPageOptions configures the behaviour of ReadPage.
// Any nil field falls back to that reader's own defaults.
type ReadPageOptions struct {
	Pagination *ReadPaginationOptions // Options forwarded to ReadPagination.
	Filter     *ReadFiltersOptions    // Options forwarded to ReadFilters.
	Sort       *ReadSortsOptions      // Options forwarded to ReadSorts.
}
+
+// ReadPage parses URL values into a convenient Page struct.
+func ReadPage(values url.Values, opt *ReadPageOptions) (*Page, error) {
+	opt = initPageOptions(opt)
+
+	pag, err := ReadPagination(values, opt.Pagination)
+	if err != nil {
+		return nil, err
+	}
+
+	filters, err := ReadFilters(values, opt.Filter)
+	if err != nil {
+		return nil, err
+	}
+
+	sorts, err := ReadSorts(values, opt.Sort)
+	if err != nil {
+		return nil, err
+	}
+
+	page := &Page{
+		Pagination: pag,
+		Filters:    filters,
+		Sorts:      sorts,
+	}
+	return page, nil
+}
+
+// ReadRequestPage parses a request's query string into a convenient Page struct.
+// This function always returns a value if it does not encounter an error.
+func ReadRequestPage(req *http.Request, opt *ReadPageOptions) (*Page, error) {
+	return ReadPage(req.URL.Query(), opt)
+}
+
+// ReadStringPage parses a query string literal into a convenient Page struct.
+// This function always returns a value if it does not encounter an error.
+func ReadStringPage(qs string, opt *ReadPageOptions) (*Page, error) {
+	values, err := url.ParseQuery(qs)
+	if err != nil {
+		return nil, err
+	}
+	return ReadPage(values, opt)
+}
+
+func initPageOptions(opt *ReadPageOptions) *ReadPageOptions {
+	def := &ReadPageOptions{}
+	if opt != nil {
+		def.Pagination = initPaginationOptions(opt.Pagination)
+		def.Filter = initFiltersOptions(opt.Filter)
+		def.Sort = initSortsOptions(opt.Sort)
+	}
+	return def
+}

+ 107 - 0
page_test.go

@@ -0,0 +1,107 @@
+package qs
+
+import "testing"
+
+func TestReadPage(t *testing.T) {
+	type TestCase struct {
+		Input  string
+		Opt    *ReadPageOptions
+		Output *Page
+		Err    error
+	}
+
+	testCases := []TestCase{
+		{
+			Input: "",
+			Output: &Page{
+				Pagination: &Pagination{},
+			},
+		},
+		{
+			Input: "limit=10&page=2",
+			Output: &Page{
+				Pagination: &Pagination{Limit: 10, Offset: 10, Page: 2},
+			},
+		},
+		{
+			Input: "limit=10&page=2&filter=title eq Spaghetti",
+			Output: &Page{
+				Pagination: &Pagination{Limit: 10, Offset: 10, Page: 2},
+				Filters: []Filter{
+					{Field: "title", Operator: "eq", Value: "Spaghetti"},
+				},
+			},
+		},
+		{
+			Input: "limit=10&page=2&filter=title eq Spaghetti&sort=serves desc",
+			Output: &Page{
+				Pagination: &Pagination{Limit: 10, Offset: 10, Page: 2},
+				Filters: []Filter{
+					{Field: "title", Operator: "eq", Value: "Spaghetti"},
+				},
+				Sorts: []Sort{
+					{Field: "serves", Direction: "desc"},
+				},
+			},
+		},
+	}
+
+	for n, tc := range testCases {
+		t.Logf("(%d) Testing %q with options %+v", n, tc.Input, tc.Opt)
+
+		page, err := ReadStringPage(tc.Input, tc.Opt)
+
+		if err != tc.Err {
+			t.Errorf("Expected error %v, got %v", tc.Err, err)
+		}
+		if tc.Err != nil {
+			continue
+		}
+
+		if tc.Output == nil && page != nil {
+			t.Error("Expected nil")
+			continue
+		}
+
+		// Compare pagination (see pagination_test.go)
+		if *page.Pagination != *tc.Output.Pagination {
+			t.Errorf("Expected %+v for pagination, got %+v", tc.Output, page.Pagination)
+		}
+
+		// Compare filters (see filter_test.go)
+		if tc.Output.Filters == nil && page.Filters != nil {
+			t.Error("Expected nil filters")
+		}
+
+		if len(page.Filters) != len(tc.Output.Filters) {
+			t.Errorf("Expected %d filters, got %d", len(tc.Output.Filters), len(page.Filters))
+		}
+
+		for i, filter := range tc.Output.Filters {
+			if i == len(page.Filters) {
+				break
+			}
+			if filter != page.Filters[i] {
+				t.Errorf("Expected %+v for filter %d, got %+v", filter, i, page.Filters[i])
+			}
+		}
+
+		// Compare sorts (see sort_test.go)
+		if tc.Output.Sorts == nil && page.Sorts != nil {
+			t.Error("Expected nil sorts")
+		}
+
+		if len(page.Sorts) != len(tc.Output.Sorts) {
+			t.Errorf("Expected %d sorts, got %d", len(tc.Output.Sorts), len(page.Sorts))
+		}
+
+		for i, sort := range tc.Output.Sorts {
+			if i == len(page.Sorts) {
+				break
+			}
+			if sort != page.Sorts[i] {
+				t.Errorf("Expected %+v for sort %d, got %+v", sort, i, page.Sorts[i])
+			}
+		}
+	}
+}

+ 123 - 0
pagination.go

@@ -0,0 +1,123 @@
+package qs
+
+import (
+	"errors"
+	"net/http"
+	"net/url"
+	"strconv"
+)
+
// Query error.
var (
	ErrInvalidLimit  = errors.New("invalid limit")  // The limit query parameter could not be used.
	ErrInvalidOffset = errors.New("invalid offset") // The offset query parameter could not be used.
	ErrInvalidPage   = errors.New("invalid page")   // The page query parameter could not be used.
)
+
// Pagination represents a page size and offset for, most likely, a database query.
// When Page is set by ReadPagination, Offset is derived as (Page - 1) * Limit.
type Pagination struct {
	Limit  int `json:"limit"`          // Maximum number of results in the page.
	Offset int `json:"offset"`         // Results offset.
	Page   int `json:"page,omitempty"` // Page number. This is 0 if the query specifies Offset directly.
}
+
// ReadPaginationOptions configures the behaviour of ReadPagination.
// A nil *ReadPaginationOptions is valid and yields the defaults below.
type ReadPaginationOptions struct {
	LimitKey  string // Query string key for limit. The default value is "limit".
	OffsetKey string // Query string key for offset. The default value is "offset".
	PageKey   string // Query string key for page. The default value is "page".

	MaxLimit int // If this is > 0, the limit is clamped to this maximum value.
	MinLimit int // The limit is clamped to this minimum value. The default value is 0.
}
+
+// ReadPagination parses URL values into a Pagination struct.
+// This function offers support for both Page and Offset values.
+// If both are provided, Offset is always prioritised.
+// If only Page is provided, Offset is calculated based on Limit.
+func ReadPagination(values url.Values, opt *ReadPaginationOptions) (*Pagination, error) {
+	opt = initPaginationOptions(opt)
+
+	limit := 0
+	offset := 0
+	page := 0
+	var err error = nil
+
+	if values.Has(opt.LimitKey) {
+		limit, err = strconv.Atoi(values.Get(opt.LimitKey))
+		if err != nil {
+			return nil, ErrInvalidLimit
+		}
+	}
+
+	if opt.MaxLimit > 0 && limit > opt.MaxLimit {
+		limit = opt.MaxLimit
+	} else if limit < opt.MinLimit {
+		limit = opt.MinLimit
+	}
+
+	if values.Has(opt.OffsetKey) {
+		offset, err = strconv.Atoi(values.Get(opt.OffsetKey))
+		if err != nil {
+			return nil, ErrInvalidOffset
+		}
+	} else if values.Has(opt.PageKey) {
+		page, err = strconv.Atoi(values.Get(opt.PageKey))
+		if err != nil {
+			return nil, ErrInvalidPage
+		}
+		offset = (page - 1) * limit
+	}
+
+	pag := &Pagination{
+		Limit:  limit,
+		Offset: offset,
+		Page:   page,
+	}
+	return pag, nil
+}
+
// ReadRequestPagination parses a request's query string into a Pagination struct.
// This function always returns a value if it does not encounter an error.
func ReadRequestPagination(req *http.Request, opt *ReadPaginationOptions) (*Pagination, error) {
	return ReadPagination(req.URL.Query(), opt)
}
+
// ReadStringPagination parses a query string literal into a Pagination struct.
// This function always returns a value if it does not encounter an error.
func ReadStringPagination(qs string, opt *ReadPaginationOptions) (*Pagination, error) {
	values, err := url.ParseQuery(qs)
	if err != nil {
		return nil, err
	}
	return ReadPagination(values, opt)
}
+
+func initPaginationOptions(opt *ReadPaginationOptions) *ReadPaginationOptions {
+	def := &ReadPaginationOptions{
+		LimitKey:  "limit",
+		OffsetKey: "offset",
+		PageKey:   "page",
+	}
+
+	if opt != nil {
+		if len(opt.LimitKey) > 0 {
+			def.LimitKey = opt.LimitKey
+		}
+		if len(opt.OffsetKey) > 0 {
+			def.OffsetKey = opt.OffsetKey
+		}
+		if len(opt.PageKey) > 0 {
+			def.PageKey = opt.PageKey
+		}
+
+		if opt.MaxLimit > def.MaxLimit {
+			def.MaxLimit = opt.MaxLimit
+		}
+		if opt.MinLimit > def.MinLimit {
+			def.MinLimit = opt.MinLimit
+		}
+	}
+
+	return def
+}

+ 50 - 0
pagination_test.go

@@ -0,0 +1,50 @@
+package qs
+
+import "testing"
+
+func TestReadPagination(t *testing.T) {
+	type TestCase struct {
+		Input  string
+		Opt    *ReadPaginationOptions
+		Output *Pagination
+		Err    error
+	}
+
+	testCases := []TestCase{
+		{Input: "", Output: &Pagination{}},
+		{Input: "limit=10", Output: &Pagination{Limit: 10}},
+		{Input: "offset=5", Output: &Pagination{Offset: 5}},
+		{Input: "limit=10&page=3", Output: &Pagination{Limit: 10, Offset: 20, Page: 3}},
+		{Input: "limit=10&offset=5&page=3", Output: &Pagination{Limit: 10, Offset: 5}},
+
+		{Input: "", Opt: &ReadPaginationOptions{MinLimit: 5, MaxLimit: 10}, Output: &Pagination{Limit: 5}},
+		{Input: "limit=3", Opt: &ReadPaginationOptions{MinLimit: 5, MaxLimit: 10}, Output: &Pagination{Limit: 5}},
+		{Input: "limit=20", Opt: &ReadPaginationOptions{MinLimit: 5, MaxLimit: 10}, Output: &Pagination{Limit: 10}},
+
+		{Input: "limit=abc", Err: ErrInvalidLimit},
+		{Input: "offset=def", Err: ErrInvalidOffset},
+		{Input: "page=ghi", Err: ErrInvalidPage},
+		{Input: "limit=abc&offset=5", Err: ErrInvalidLimit},
+		{Input: "limit=5&offset=def", Err: ErrInvalidOffset},
+		{Input: "limit=5&page=ghi", Err: ErrInvalidPage},
+	}
+
+	for n, tc := range testCases {
+		t.Logf("(%d) Testing %q with options %+v", n, tc.Input, tc.Opt)
+
+		pag, err := ReadStringPagination(tc.Input, tc.Opt)
+
+		if err != tc.Err {
+			t.Errorf("Expected error %v, got %v", tc.Err, err)
+			continue
+		}
+
+		if tc.Err != nil {
+			continue
+		}
+
+		if *pag != *tc.Output {
+			t.Errorf("Expected %+v, got %+v", tc.Output, pag)
+		}
+	}
+}

+ 131 - 0
sort.go

@@ -0,0 +1,131 @@
+package qs
+
+import (
+	"errors"
+	"net/http"
+	"net/url"
+	"regexp"
+)
+
// Query errors returned when sort parsing fails.
var (
	// ErrInvalidSort is returned when a sort entry does not match the
	// "<field> (asc|desc)" format.
	ErrInvalidSort = errors.New("invalid sort")
	// ErrTooManySorts is returned when more sort entries are present than
	// ReadSortsOptions.MaxSorts allows.
	ErrTooManySorts = errors.New("too many sorts")
)

// sortRegexp matches strings of the form "<field> asc" or "<field> desc".
// [A-Za-z0-9] fixes the original [A-z] range, which also matched the
// punctuation characters between 'Z' and 'a' ([ \ ] ^ _ `).
var sortRegexp = regexp.MustCompile(`^([A-Za-z0-9]+) (asc|desc)$`)
+
// ReadSortsOptions configures the behaviour of ReadSorts.
// A nil *ReadSortsOptions is valid and yields the defaults below.
type ReadSortsOptions struct {
	Key      string // Query string key. The default value is "sort".
	MaxSorts int    // If this is > 0, a maximum number of sorts is imposed.
}
+
// Sort represents a sort order for, most likely, a database query.
type Sort struct {
	Field     string `json:"field"`     // Field by which to sort.
	Direction string `json:"direction"` // Direction in which to sort, namely asc or desc.
}

// Sorts is a slice of Sort structs.
type Sorts []Sort

// Field returns a new Sorts slice containing only sorts for the specified field.
// The original order of sorts is preserved.
func (ss Sorts) Field(field string) Sorts {
	matched := Sorts{}
	for i := range ss {
		if ss[i].Field == field {
			matched = append(matched, ss[i])
		}
	}
	return matched
}

// Fields returns a new Sorts slice containing sorts for any of the specified fields.
// The original order of sorts is preserved.
func (ss Sorts) Fields(fields ...string) Sorts {
	matched := Sorts{}
	for i := range ss {
		for _, field := range fields {
			if ss[i].Field == field {
				matched = append(matched, ss[i])
			}
		}
	}
	return matched
}

// HasField returns true if the Sorts slice includes any sorts for the specified field.
func (ss Sorts) HasField(field string) bool {
	for i := range ss {
		if ss[i].Field == field {
			return true
		}
	}
	return false
}
+
+// ReadRequestSorts parses a request's query string into a slice of sorts.
+// This function returns nil if no sorts are found.
+func ReadRequestSorts(req *http.Request, opt *ReadSortsOptions) (Sorts, error) {
+	return ReadSorts(req.URL.Query(), opt)
+}
+
+// ReadSorts parses URL values into a slice of sorts.
+// This function returns nil if no sorts are found.
+func ReadSorts(values url.Values, opt *ReadSortsOptions) (Sorts, error) {
+	opt = initSortsOptions(opt)
+
+	if !values.Has(opt.Key) {
+		return nil, nil
+	}
+
+	if opt.MaxSorts > 0 && len(values[opt.Key]) > opt.MaxSorts {
+		return nil, ErrTooManySorts
+	}
+
+	sorts := []Sort{}
+	for _, sortStr := range values[opt.Key] {
+		match := sortRegexp.FindStringSubmatch(sortStr)
+		if match == nil {
+			return nil, ErrInvalidSort
+		}
+
+		sort := Sort{
+			Field:     match[1],
+			Direction: match[2],
+		}
+		sorts = append(sorts, sort)
+	}
+
+	return sorts, nil
+}
+
+// ReadStringSorts parses a query string literal into a slice of sorts.
+// This function returns nil if no sorts are found.
+func ReadStringSorts(qs string, opt *ReadSortsOptions) (Sorts, error) {
+	values, err := url.ParseQuery(qs)
+	if err != nil {
+		return nil, err
+	}
+	return ReadSorts(values, opt)
+}
+
+func initSortsOptions(opt *ReadSortsOptions) *ReadSortsOptions {
+	def := &ReadSortsOptions{
+		Key: "sort",
+	}
+
+	if opt != nil {
+		if len(opt.Key) > 0 {
+			def.Key = opt.Key
+		}
+
+		if opt.MaxSorts > def.MaxSorts {
+			def.MaxSorts = opt.MaxSorts
+		}
+	}
+
+	return def
+}

+ 60 - 0
sort_test.go

@@ -0,0 +1,60 @@
+package qs
+
+import "testing"
+
+func TestReadSorts(t *testing.T) {
+	type TestCase struct {
+		Input  string
+		Opt    *ReadSortsOptions
+		Output []Sort
+		Err    error
+	}
+
+	testCases := []TestCase{
+		{Input: ""},
+		{
+			Input: "sort=title asc",
+			Output: []Sort{
+				{Field: "title", Direction: "asc"},
+			},
+		},
+		{
+			Input: "sort=title asc&sort=serves asc",
+			Output: []Sort{
+				{Field: "title", Direction: "asc"},
+				{Field: "serves", Direction: "asc"},
+			},
+		},
+	}
+
+	for n, tc := range testCases {
+		t.Logf("(%d) Testing %q with options %+v", n, tc.Input, tc.Opt)
+
+		sorts, err := ReadStringSorts(tc.Input, nil)
+
+		if err != tc.Err {
+			t.Errorf("Expected error %v, got %v", tc.Err, err)
+		}
+		if tc.Err != nil {
+			continue
+		}
+
+		if tc.Output == nil && sorts != nil {
+			t.Error("Expected nil")
+			continue
+		}
+
+		if len(sorts) != len(tc.Output) {
+			t.Errorf("Expected %d sorts, got %d", len(tc.Output), len(sorts))
+		}
+
+		for i, sort := range tc.Output {
+			if i == len(sorts) {
+				break
+			}
+			if sort != sorts[i] {
+				t.Errorf("Expected %+v for sort %d, got %+v", sort, i, sorts[i])
+			}
+		}
+	}
+}