From 20edd60174365ae03e39b0d2c5f8d54df3bd1692 Mon Sep 17 00:00:00 2001
From: Onur Cinar
Date: Sun, 25 Jun 2023 11:29:17 -0700
Subject: [PATCH] URL escape and unescape normalizers. Fixes #110 (#119)

# Describe Request

URL escape and unescape normalizers. Fixes #110

# Change Type

New normalizers.
---
 README.md                       |  4 +++-
 checker.go                      |  2 ++
 doc/normalizers/url_escape.md   | 22 +++++++++++++++++++
 doc/normalizers/url_unescape.md | 26 ++++++++++++++++++++++
 url_escape.go                   | 26 ++++++++++++++++++++++
 url_escape_test.go              | 38 +++++++++++++++++++++++++++++++++
 url_unescape.go                 | 29 +++++++++++++++++++++++++
 url_unescape_test.go            | 38 +++++++++++++++++++++++++++++++++
 8 files changed, 184 insertions(+), 1 deletion(-)
 create mode 100644 doc/normalizers/url_escape.md
 create mode 100644 doc/normalizers/url_unescape.md
 create mode 100644 url_escape.go
 create mode 100644 url_escape_test.go
 create mode 100644 url_unescape.go
 create mode 100644 url_unescape_test.go

diff --git a/README.md b/README.md
index a987b18..4c2fbb0 100644
--- a/README.md
+++ b/README.md
@@ -99,13 +99,15 @@ This package currently provides the following checkers:
 This package currently provides the following normalizers. They can be mixed with the checkers when defining the validation steps for user data.
 
 - [html-escape](doc/normalizers/html_escape.md) applies HTML escaping to special characters.
-- [html-unescape](doc//normalizers/html_unescape.md) applies HTML unescaping to special characters.
+- [html-unescape](doc/normalizers/html_unescape.md) applies HTML unescaping to special characters.
 - [lower](doc/normalizers/lower.md) maps all Unicode letters in the given value to their lower case.
 - [upper](doc/normalizers/upper.md) maps all Unicode letters in the given value to their upper case.
 - [title](doc/normalizers/title.md) maps the first letter of each word to their upper case.
 - [trim](doc/normalizers/trim.md) removes the whitespaces at the beginning and at the end of the given value.
 - [trim-left](doc/normalizers/trim_left.md) removes the whitespaces at the beginning of the given value.
 - [trim-right](doc/normalizers/trim_right.md) removes the whitespaces at the end of the given value.
+- [url-escape](doc/normalizers/url_escape.md) applies URL escaping to special characters.
+- [url-unescape](doc/normalizers/url_unescape.md) applies URL unescaping to special characters.
 
 # Custom Checkers
 
diff --git a/checker.go b/checker.go
index 98ff61b..0865ac2 100644
--- a/checker.go
+++ b/checker.go
@@ -66,6 +66,8 @@ var makers = map[string]MakeFunc{
 	NormalizerTrim:        makeTrim,
 	NormalizerTrimLeft:    makeTrimLeft,
 	NormalizerTrimRight:   makeTrimRight,
+	NormalizerURLEscape:   makeURLEscape,
+	NormalizerURLUnescape: makeURLUnescape,
 }
 
 // Register registers the given checker name and the maker function.
diff --git a/doc/normalizers/url_escape.md b/doc/normalizers/url_escape.md
new file mode 100644
index 0000000..0d229d5
--- /dev/null
+++ b/doc/normalizers/url_escape.md
@@ -0,0 +1,22 @@
+# URL Escape Normalizer
+
+The `url-escape` normalizer uses [url.QueryEscape](https://pkg.go.dev/net/url#QueryEscape) from the `net/url` package to escape the string so it can be safely placed inside a URL query.
+
+```golang
+type Request struct {
+	Query string `checkers:"url-escape"`
+}
+
+request := &Request{
+	Query: "param1/param2 = 1 + 2 & 3 + 4",
+}
+
+_, valid := checker.Check(request)
+if !valid {
+	t.Fail()
+}
+
+// Outputs:
+// param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4
+fmt.Println(request.Query)
+```
diff --git a/doc/normalizers/url_unescape.md b/doc/normalizers/url_unescape.md
new file mode 100644
index 0000000..8982a49
--- /dev/null
+++ b/doc/normalizers/url_unescape.md
@@ -0,0 +1,26 @@
+# URL Unescape Normalizer
+
+The `url-unescape` normalizer uses [url.QueryUnescape](https://pkg.go.dev/net/url#QueryUnescape) from the `net/url` package to convert each 3-byte encoded substring of the form "%AB" into the hex-decoded byte 0xAB.
+
+```golang
+type Request struct {
+	Query string `checkers:"url-unescape"`
+}
+
+request := &Request{
+	Query: "param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4",
+}
+
+_, valid := checker.Check(request)
+if !valid {
+	t.Fail()
+}
+
+if request.Query != "param1/param2 = 1 + 2 & 3 + 4" {
+	t.Fail()
+}
+
+// Outputs:
+// param1/param2 = 1 + 2 & 3 + 4
+fmt.Println(request.Query)
+```
diff --git a/url_escape.go b/url_escape.go
new file mode 100644
index 0000000..7a0b1c0
--- /dev/null
+++ b/url_escape.go
@@ -0,0 +1,26 @@
+package checker
+
+import (
+	"net/url"
+	"reflect"
+)
+
+// NormalizerURLEscape is the name of the normalizer.
+const NormalizerURLEscape = "url-escape"
+
+// makeURLEscape makes a normalizer function for the URL escape normalizer.
+func makeURLEscape(_ string) CheckFunc {
+	return normalizeURLEscape
+}
+
+// normalizeURLEscape applies URL escaping to special characters.
+// Uses url.QueryEscape for the actual escape operation.
+func normalizeURLEscape(value, _ reflect.Value) Result {
+	if value.Kind() != reflect.String {
+		panic("string expected")
+	}
+
+	value.SetString(url.QueryEscape(value.String()))
+
+	return ResultValid
+}
diff --git a/url_escape_test.go b/url_escape_test.go
new file mode 100644
index 0000000..2b7cd6d
--- /dev/null
+++ b/url_escape_test.go
@@ -0,0 +1,38 @@
+package checker_test
+
+import (
+	"testing"
+
+	"github.com/cinar/checker"
+)
+
+func TestNormalizeURLEscapeNonString(t *testing.T) {
+	defer checker.FailIfNoPanic(t)
+
+	type Request struct {
+		Query int `checkers:"url-escape"`
+	}
+
+	request := &Request{}
+
+	checker.Check(request)
+}
+
+func TestNormalizeURLEscape(t *testing.T) {
+	type Request struct {
+		Query string `checkers:"url-escape"`
+	}
+
+	request := &Request{
+		Query: "param1/param2 = 1 + 2 & 3 + 4",
+	}
+
+	_, valid := checker.Check(request)
+	if !valid {
+		t.Fail()
+	}
+
+	if request.Query != "param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4" {
+		t.Fail()
+	}
+}
diff --git a/url_unescape.go b/url_unescape.go
new file mode 100644
index 0000000..99859c0
--- /dev/null
+++ b/url_unescape.go
@@ -0,0 +1,29 @@
+package checker
+
+import (
+	"net/url"
+	"reflect"
+)
+
+// NormalizerURLUnescape is the name of the normalizer.
+const NormalizerURLUnescape = "url-unescape"
+
+// makeURLUnescape makes a normalizer function for the URL unescape normalizer.
+func makeURLUnescape(_ string) CheckFunc {
+	return normalizeURLUnescape
+}
+
+// normalizeURLUnescape applies URL unescaping to special characters.
+// Uses url.QueryUnescape for the actual unescape operation.
+func normalizeURLUnescape(value, _ reflect.Value) Result {
+	if value.Kind() != reflect.String {
+		panic("string expected")
+	}
+
+	unescaped, err := url.QueryUnescape(value.String())
+	if err == nil {
+		value.SetString(unescaped)
+	}
+
+	return ResultValid
+}
diff --git a/url_unescape_test.go b/url_unescape_test.go
new file mode 100644
index 0000000..19427a9
--- /dev/null
+++ b/url_unescape_test.go
@@ -0,0 +1,38 @@
+package checker_test
+
+import (
+	"testing"
+
+	"github.com/cinar/checker"
+)
+
+func TestNormalizeURLUnescapeNonString(t *testing.T) {
+	defer checker.FailIfNoPanic(t)
+
+	type Request struct {
+		Query int `checkers:"url-unescape"`
+	}
+
+	request := &Request{}
+
+	checker.Check(request)
+}
+
+func TestNormalizeURLUnescape(t *testing.T) {
+	type Request struct {
+		Query string `checkers:"url-unescape"`
+	}
+
+	request := &Request{
+		Query: "param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4",
+	}
+
+	_, valid := checker.Check(request)
+	if !valid {
+		t.Fail()
+	}
+
+	if request.Query != "param1/param2 = 1 + 2 & 3 + 4" {
+		t.Fail()
+	}
+}
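For reviewers who want to exercise the change from outside the package, here is a minimal usage sketch. It relies only on what the patch itself shows: the `checkers:"url-escape"` and `checkers:"url-unescape"` struct tags, the two-value `checker.Check` call used in the added tests, and the `github.com/cinar/checker` import path. The `main` wrapper, struct name, and field names are illustrative, not part of the patch.

```golang
package main

import (
	"fmt"

	"github.com/cinar/checker"
)

// SearchRequest is an illustrative struct; the tags use the
// normalizer names added by this patch.
type SearchRequest struct {
	// Raw is escaped so it can be placed inside a URL query.
	Raw string `checkers:"url-escape"`

	// Encoded is unescaped back into its original form.
	Encoded string `checkers:"url-unescape"`
}

func main() {
	request := &SearchRequest{
		Raw:     "param1/param2 = 1 + 2 & 3 + 4",
		Encoded: "param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4",
	}

	// Check runs the normalizers declared in the struct tags in place;
	// valid reports whether any checkers (none here) failed.
	if _, valid := checker.Check(request); !valid {
		fmt.Println("validation failed")
		return
	}

	fmt.Println(request.Raw)     // param1%2Fparam2+%3D+1+%2B+2+%26+3+%2B+4
	fmt.Println(request.Encoded) // param1/param2 = 1 + 2 & 3 + 4
}
```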