From 6b740c6882b52b451c70efe2722d2ccf81a18a17 Mon Sep 17 00:00:00 2001 From: Paul Lecuq Date: Fri, 4 Dec 2020 20:36:22 +0100 Subject: [PATCH] updated dependencies --- go.mod | 15 +- go.sum | 29 + .../github.com/antchfx/xmlquery/.travis.yml | 11 +- vendor/github.com/antchfx/xmlquery/README.md | 116 +- vendor/github.com/antchfx/xmlquery/cache.go | 43 + .../antchfx/xmlquery/cached_reader.go | 69 + vendor/github.com/antchfx/xmlquery/go.mod | 9 + vendor/github.com/antchfx/xmlquery/go.sum | 14 + vendor/github.com/antchfx/xmlquery/node.go | 238 +- vendor/github.com/antchfx/xmlquery/parse.go | 334 ++ vendor/github.com/antchfx/xmlquery/query.go | 78 +- vendor/github.com/antchfx/xpath/README.md | 6 + vendor/github.com/antchfx/xpath/build.go | 78 +- vendor/github.com/antchfx/xpath/cache.go | 80 + vendor/github.com/antchfx/xpath/func.go | 252 +- vendor/github.com/antchfx/xpath/func_go110.go | 9 +- .../antchfx/xpath/func_pre_go110.go | 9 +- vendor/github.com/antchfx/xpath/go.mod | 3 + vendor/github.com/antchfx/xpath/operator.go | 12 +- vendor/github.com/antchfx/xpath/query.go | 143 +- vendor/github.com/antchfx/xpath/xpath.go | 6 +- vendor/github.com/golang/groupcache/LICENSE | 191 + .../github.com/golang/groupcache/lru/lru.go | 133 + .../influxdata/influxdb1-client/v2/client.go | 50 +- .../influxdata/influxdb1-client/v2/params.go | 73 + vendor/golang.org/x/net/html/const.go | 3 +- vendor/golang.org/x/net/html/foreign.go | 120 +- vendor/golang.org/x/net/html/node.go | 5 + vendor/golang.org/x/net/html/parse.go | 313 +- vendor/golang.org/x/net/html/render.go | 34 +- vendor/golang.org/x/net/html/token.go | 9 +- vendor/golang.org/x/text/encoding/encoding.go | 2 +- .../x/text/encoding/htmlindex/tables.go | 1 + .../internal/identifier/identifier.go | 2 +- .../text/encoding/internal/identifier/mib.go | 96 +- .../x/text/encoding/unicode/unicode.go | 96 +- .../x/text/{ => internal}/language/common.go | 12 +- .../x/text/internal/language/compact.go | 29 + .../text/internal/language/compact/compact.go | 61 + .../internal/language/compact/language.go | 260 ++ .../text/internal/language/compact/parents.go | 120 + .../text/internal/language/compact/tables.go | 1015 +++++ .../x/text/internal/language/compact/tags.go | 91 + .../x/text/internal/language/compose.go | 167 + .../x/text/internal/language/coverage.go | 28 + .../x/text/internal/language/language.go | 596 +++ .../x/text/{ => internal}/language/lookup.go | 120 +- .../x/text/internal/language/match.go | 226 ++ .../x/text/internal/language/parse.go | 594 +++ .../x/text/internal/language/tables.go | 3464 ++++++++++++++++ .../x/text/internal/language/tags.go | 48 + vendor/golang.org/x/text/language/Makefile | 16 - vendor/golang.org/x/text/language/coverage.go | 34 +- vendor/golang.org/x/text/language/index.go | 783 ---- vendor/golang.org/x/text/language/language.go | 716 +--- vendor/golang.org/x/text/language/match.go | 422 +- vendor/golang.org/x/text/language/parse.go | 735 +--- vendor/golang.org/x/text/language/tables.go | 3500 +---------------- vendor/golang.org/x/text/language/tags.go | 160 +- .../golang.org/x/text/transform/transform.go | 12 +- vendor/gopkg.in/ini.v1/.travis.yml | 17 - vendor/gopkg.in/ini.v1/Makefile | 2 +- vendor/gopkg.in/ini.v1/README.md | 19 +- vendor/gopkg.in/ini.v1/codecov.yml | 9 + vendor/gopkg.in/ini.v1/data_source.go | 76 + vendor/gopkg.in/ini.v1/deprecated.go | 25 + vendor/gopkg.in/ini.v1/error.go | 2 + vendor/gopkg.in/ini.v1/file.go | 185 +- vendor/gopkg.in/ini.v1/helper.go | 24 + vendor/gopkg.in/ini.v1/ini.go | 127 +- 
vendor/gopkg.in/ini.v1/key.go | 165 +- vendor/gopkg.in/ini.v1/parser.go | 217 +- vendor/gopkg.in/ini.v1/section.go | 21 +- vendor/gopkg.in/ini.v1/struct.go | 377 +- vendor/modules.txt | 16 +- 75 files changed, 10290 insertions(+), 6883 deletions(-) create mode 100644 vendor/github.com/antchfx/xmlquery/cache.go create mode 100644 vendor/github.com/antchfx/xmlquery/cached_reader.go create mode 100644 vendor/github.com/antchfx/xmlquery/go.mod create mode 100644 vendor/github.com/antchfx/xmlquery/go.sum create mode 100644 vendor/github.com/antchfx/xmlquery/parse.go create mode 100644 vendor/github.com/antchfx/xpath/cache.go create mode 100644 vendor/github.com/antchfx/xpath/go.mod create mode 100644 vendor/github.com/golang/groupcache/LICENSE create mode 100644 vendor/github.com/golang/groupcache/lru/lru.go create mode 100644 vendor/github.com/influxdata/influxdb1-client/v2/params.go rename vendor/golang.org/x/text/{ => internal}/language/common.go (50%) create mode 100644 vendor/golang.org/x/text/internal/language/compact.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/compact.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/language.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/parents.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/tables.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/tags.go create mode 100644 vendor/golang.org/x/text/internal/language/compose.go create mode 100644 vendor/golang.org/x/text/internal/language/coverage.go create mode 100644 vendor/golang.org/x/text/internal/language/language.go rename vendor/golang.org/x/text/{ => internal}/language/lookup.go (80%) create mode 100644 vendor/golang.org/x/text/internal/language/match.go create mode 100644 vendor/golang.org/x/text/internal/language/parse.go create mode 100644 vendor/golang.org/x/text/internal/language/tables.go create mode 100644 vendor/golang.org/x/text/internal/language/tags.go delete mode 100644 vendor/golang.org/x/text/language/Makefile delete mode 100644 vendor/golang.org/x/text/language/index.go delete mode 100644 vendor/gopkg.in/ini.v1/.travis.yml create mode 100644 vendor/gopkg.in/ini.v1/codecov.yml create mode 100644 vendor/gopkg.in/ini.v1/data_source.go create mode 100644 vendor/gopkg.in/ini.v1/deprecated.go create mode 100644 vendor/gopkg.in/ini.v1/helper.go diff --git a/go.mod b/go.mod index 3453b78..d0fe98e 100644 --- a/go.mod +++ b/go.mod @@ -3,10 +3,13 @@ module git.paulbsd.com/paulbsd/fuelprices go 1.13 require ( - github.com/antchfx/xmlquery v1.0.0 - github.com/antchfx/xpath v1.0.0 // indirect - github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc - github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a // indirect - golang.org/x/net v0.0.0-20190607181551-461777fb6f67 // indirect - gopkg.in/ini.v1 v1.42.0 + github.com/antchfx/xmlquery v1.3.3 + github.com/antchfx/xpath v1.1.11 // indirect + github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect + github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab + github.com/smartystreets/assertions v1.2.0 // indirect + github.com/smartystreets/goconvey v1.6.4 // indirect + golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb // indirect + golang.org/x/text v0.3.4 // indirect + gopkg.in/ini.v1 v1.62.0 ) diff --git a/go.sum b/go.sum index c1c0460..b76f543 100644 --- a/go.sum +++ b/go.sum @@ -1,24 +1,53 @@ github.com/antchfx/xmlquery v1.0.0 
h1:YuEPqexGG2opZKNc9JU3Zw6zFXwC47wNcy6/F8oKsrM= github.com/antchfx/xmlquery v1.0.0/go.mod h1:/+CnyD/DzHRnv2eRxrVbieRU/FIF6N0C+7oTtyUtCKk= +github.com/antchfx/xmlquery v1.3.3 h1:HYmadPG0uz8CySdL68rB4DCLKXz2PurCjS3mnkVF4CQ= +github.com/antchfx/xmlquery v1.3.3/go.mod h1:64w0Xesg2sTaawIdNqMB+7qaW/bSqkQm+ssPaCMWNnc= github.com/antchfx/xpath v1.0.0 h1:Q5gFgh2O40VTSwMOVbFE7nFNRBu3tS21Tn0KAWeEjtk= github.com/antchfx/xpath v1.0.0/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/antchfx/xpath v1.1.10/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/antchfx/xpath v1.1.11 h1:WOFtK8TVAjLm3lbgqeP0arlHpvCEeTANeWZ/csPpJkQ= +github.com/antchfx/xpath v1.1.11/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc h1:KpMgaYJRieDkHZJWY3LMafvtqS/U8xX6+lUN+OKpl/Y= github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab h1:HqW4xhhynfjrtEiiSGcQUd6vrK23iMam1FO8rI7mwig= +github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= +github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a h1:pa8hGb/2YqsZKovtsgrwcDH1RZhVbTKCjLp47XpqCDs= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190607181551-461777fb6f67 h1:rJJxsykSlULwd2P2+pg/rtnwN2FrWp4IuCxOSyS0V00= golang.org/x/net 
v0.0.0-20190607181551-461777fb6f67/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= gopkg.in/ini.v1 v1.42.0 h1:7N3gPTt50s8GuLortA00n8AqRTk75qOP98+mTPpgzRk= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= diff --git a/vendor/github.com/antchfx/xmlquery/.travis.yml b/vendor/github.com/antchfx/xmlquery/.travis.yml index d9a7bb8..731b767 100644 --- a/vendor/github.com/antchfx/xmlquery/.travis.yml +++ b/vendor/github.com/antchfx/xmlquery/.travis.yml @@ -1,14 +1,17 @@ language: go go: - - 1.6 - - 1.7 - - 1.8 + - 1.9.x + - 1.12.x + - 1.13.x + - 1.14.x + - 1.15.x install: - go get golang.org/x/net/html/charset - go get github.com/antchfx/xpath - go get github.com/mattn/goveralls + - go get github.com/golang/groupcache script: - - $HOME/gopath/bin/goveralls -service=travis-ci \ No newline at end of file + - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/antchfx/xmlquery/README.md b/vendor/github.com/antchfx/xmlquery/README.md index 6683afd..bae7fc3 100644 --- a/vendor/github.com/antchfx/xmlquery/README.md +++ b/vendor/github.com/antchfx/xmlquery/README.md @@ -8,45 +8,104 @@ xmlquery Overview === -xmlquery is an XPath query package for XML document, lets you extract data or evaluate from XML documents by an XPath expression. +`xmlquery` is an XPath query package for XML documents, allowing you to extract +data or evaluate from XML documents with an XPath expression. + +`xmlquery` has a built-in query object caching feature that caches recently used +XPATH query strings. Enabling caching can avoid recompile XPath expression for +each query. Change Logs === -**2018-12-23** -* added XML output will including comment node. [#9](https://github.com/antchfx/xmlquery/issues/9) +2020-08-?? +- Add XML stream loading and parsing support. -**2018-12-03** - * added support attribute name with namespace prefix and XML output. [#6](https://github.com/antchfx/xmlquery/issues/6) +2019-11-11 +- Add XPath query caching. 
+ +2019-10-05 +- Add new methods compatible with invalid XPath expression error: `QueryAll` and `Query`. +- Add `QuerySelector` and `QuerySelectorAll` methods, support for reused query objects. +- PR [#12](https://github.com/antchfx/xmlquery/pull/12) (Thanks @FrancescoIlario) +- PR [#11](https://github.com/antchfx/xmlquery/pull/11) (Thanks @gjvnq) + +2018-12-23 +- Added XML output including comment nodes. [#9](https://github.com/antchfx/xmlquery/issues/9) + +2018-12-03 +- Added support to attribute name with namespace prefix and XML output. [#6](https://github.com/antchfx/xmlquery/issues/6) Installation ==== - -> $ go get github.com/antchfx/xmlquery +``` + $ go get github.com/antchfx/xmlquery +``` Getting Started === -#### Parse a XML from URL. +### Find specified XPath query. + +```go +list, err := xmlquery.QueryAll(doc, "a") +if err != nil { + panic(err) +} +``` + +#### Parse an XML from URL. ```go doc, err := xmlquery.LoadURL("http://www.example.com/sitemap.xml") ``` -#### Parse a XML from string. +#### Parse an XML from string. ```go s := `` doc, err := xmlquery.Parse(strings.NewReader(s)) ``` -#### Parse a XML from io.Reader. +#### Parse an XML from io.Reader. ```go f, err := os.Open("../books.xml") doc, err := xmlquery.Parse(f) ``` +#### Parse an XML in a stream fashion (simple case without elements filtering). + +```go +f, err := os.Open("../books.xml") +p, err := xmlquery.CreateStreamParser(f, "/bookstore/book") +for { + n, err := p.Read() + if err == io.EOF { + break + } + if err != nil { + ... + } +} +``` + +#### Parse an XML in a stream fashion (simple case advanced element filtering). + +```go +f, err := os.Open("../books.xml") +p, err := xmlquery.CreateStreamParser(f, "/bookstore/book", "/bookstore/book[price>=10]") +for { + n, err := p.Read() + if err == io.EOF { + break + } + if err != nil { + ... + } +} +``` + #### Find authors of all books in the bookstore. ```go @@ -61,25 +120,25 @@ list := xmlquery.Find(doc, "//author") book := xmlquery.FindOne(doc, "//book[2]") ``` -#### Find all book elements and only get `id` attribute self. (New Feature) +#### Find all book elements and only get `id` attribute. (New Feature) ```go list := xmlquery.Find(doc,"//book/@id") ``` -#### Find all books with id is bk104. +#### Find all books with id `bk104`. ```go list := xmlquery.Find(doc, "//book[@id='bk104']") ``` -#### Find all books that price less than 5. +#### Find all books with price less than 5. ```go list := xmlquery.Find(doc, "//book[price<5]") ``` -#### Evaluate the total price of all books. +#### Evaluate total price of all books. ```go expr, err := xpath.Compile("sum(//book/price)") @@ -87,13 +146,30 @@ price := expr.Evaluate(xmlquery.CreateXPathNavigator(doc)).(float64) fmt.Printf("total price: %f\n", price) ``` -#### Evaluate the number of all books element. +#### Evaluate number of all book elements. ```go expr, err := xpath.Compile("count(//book)") price := expr.Evaluate(xmlquery.CreateXPathNavigator(doc)).(float64) ``` +FAQ +==== + +#### `Find()` vs `QueryAll()`, which is better? + +`Find` and `QueryAll` both do the same thing: searches all of matched XML nodes. +`Find` panics if provided with an invalid XPath query, while `QueryAll` returns +an error. + +#### Can I save my query expression object for the next query? + +Yes, you can. We provide `QuerySelector` and `QuerySelectorAll` methods; they +accept your query expression object. + +Caching a query expression object avoids recompiling the XPath query +expression, improving query performance. + #### Create XML document. 
```go @@ -175,11 +251,11 @@ func main(){ List of supported XPath query packages === -|Name |Description | -|--------------------------|----------------| -|[htmlquery](https://github.com/antchfx/htmlquery) | XPath query package for the HTML document| -|[xmlquery](https://github.com/antchfx/xmlquery) | XPath query package for the XML document| -|[jsonquery](https://github.com/antchfx/jsonquery) | XPath query package for the JSON document| +| Name | Description | +| ------------------------------------------------- | ----------------------------------------- | +| [htmlquery](https://github.com/antchfx/htmlquery) | XPath query package for HTML documents | +| [xmlquery](https://github.com/antchfx/xmlquery) | XPath query package for XML documents | +| [jsonquery](https://github.com/antchfx/jsonquery) | XPath query package for JSON documents | Questions === diff --git a/vendor/github.com/antchfx/xmlquery/cache.go b/vendor/github.com/antchfx/xmlquery/cache.go new file mode 100644 index 0000000..3abffcd --- /dev/null +++ b/vendor/github.com/antchfx/xmlquery/cache.go @@ -0,0 +1,43 @@ +package xmlquery + +import ( + "sync" + + "github.com/golang/groupcache/lru" + + "github.com/antchfx/xpath" +) + +// DisableSelectorCache will disable caching for the query selector if value is true. +var DisableSelectorCache = false + +// SelectorCacheMaxEntries allows how many selector object can be caching. Default is 50. +// Will disable caching if SelectorCacheMaxEntries <= 0. +var SelectorCacheMaxEntries = 50 + +var ( + cacheOnce sync.Once + cache *lru.Cache + cacheMutex sync.Mutex +) + +func getQuery(expr string) (*xpath.Expr, error) { + if DisableSelectorCache || SelectorCacheMaxEntries <= 0 { + return xpath.Compile(expr) + } + cacheOnce.Do(func() { + cache = lru.New(SelectorCacheMaxEntries) + }) + cacheMutex.Lock() + defer cacheMutex.Unlock() + if v, ok := cache.Get(expr); ok { + return v.(*xpath.Expr), nil + } + v, err := xpath.Compile(expr) + if err != nil { + return nil, err + } + cache.Add(expr, v) + return v, nil + +} diff --git a/vendor/github.com/antchfx/xmlquery/cached_reader.go b/vendor/github.com/antchfx/xmlquery/cached_reader.go new file mode 100644 index 0000000..fe389c5 --- /dev/null +++ b/vendor/github.com/antchfx/xmlquery/cached_reader.go @@ -0,0 +1,69 @@ +package xmlquery + +import ( + "bufio" +) + +type cachedReader struct { + buffer *bufio.Reader + cache []byte + cacheCap int + cacheLen int + caching bool +} + +func newCachedReader(r *bufio.Reader) *cachedReader { + return &cachedReader{ + buffer: r, + cache: make([]byte, 4096), + cacheCap: 4096, + cacheLen: 0, + caching: false, + } +} + +func (c *cachedReader) StartCaching() { + c.cacheLen = 0 + c.caching = true +} + +func (c *cachedReader) ReadByte() (byte, error) { + if !c.caching { + return c.buffer.ReadByte() + } + b, err := c.buffer.ReadByte() + if err != nil { + return b, err + } + if c.cacheLen < c.cacheCap { + c.cache[c.cacheLen] = b + c.cacheLen++ + } + return b, err +} + +func (c *cachedReader) Cache() []byte { + return c.cache[:c.cacheLen] +} + +func (c *cachedReader) StopCaching() { + c.caching = false +} + +func (c *cachedReader) Read(p []byte) (int, error) { + n, err := c.buffer.Read(p) + if err != nil { + return n, err + } + if c.caching && c.cacheLen < c.cacheCap { + for i := 0; i < n; i++ { + c.cache[c.cacheLen] = p[i] + c.cacheLen++ + if c.cacheLen >= c.cacheCap { + break + } + } + } + return n, err +} + diff --git a/vendor/github.com/antchfx/xmlquery/go.mod b/vendor/github.com/antchfx/xmlquery/go.mod new file mode 100644 
index 0000000..b6f453e --- /dev/null +++ b/vendor/github.com/antchfx/xmlquery/go.mod @@ -0,0 +1,9 @@ +module github.com/antchfx/xmlquery + +go 1.14 + +require ( + github.com/antchfx/xpath v1.1.10 + github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e + golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc +) diff --git a/vendor/github.com/antchfx/xmlquery/go.sum b/vendor/github.com/antchfx/xmlquery/go.sum new file mode 100644 index 0000000..9f54294 --- /dev/null +++ b/vendor/github.com/antchfx/xmlquery/go.sum @@ -0,0 +1,14 @@ +github.com/antchfx/xpath v1.1.10 h1:cJ0pOvEdN/WvYXxvRrzQH9x5QWKpzHacYO8qzCcDYAg= +github.com/antchfx/xpath v1.1.10/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc h1:zK/HqS5bZxDptfPJNq8v7vJfXtkU7r9TLIoSr1bXaP4= +golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/vendor/github.com/antchfx/xmlquery/node.go b/vendor/github.com/antchfx/xmlquery/node.go index d0e6a54..e6b893c 100644 --- a/vendor/github.com/antchfx/xmlquery/node.go +++ b/vendor/github.com/antchfx/xmlquery/node.go @@ -3,13 +3,8 @@ package xmlquery import ( "bytes" "encoding/xml" - "errors" "fmt" - "io" - "net/http" "strings" - - "golang.org/x/net/html/charset" ) // A NodeType is the type of a Node. @@ -19,13 +14,15 @@ const ( // DocumentNode is a document object that, as the root of the document tree, // provides access to the entire XML document. DocumentNode NodeType = iota - // DeclarationNode is the document type declaration, indicated by the following - // tag (for example, ). + // DeclarationNode is the document type declaration, indicated by the + // following tag (for example, ). DeclarationNode // ElementNode is an element (for example, ). ElementNode // TextNode is the text content of a node. TextNode + // CharDataNode node + CharDataNode // CommentNode a comment (for example, ). CommentNode // AttributeNode is an attribute of element. 
@@ -51,14 +48,13 @@ func (n *Node) InnerText() string { var output func(*bytes.Buffer, *Node) output = func(buf *bytes.Buffer, n *Node) { switch n.Type { - case TextNode: + case TextNode, CharDataNode: buf.WriteString(n.Data) - return case CommentNode: - return - } - for child := n.FirstChild; child != nil; child = child.NextSibling { - output(buf, child) + default: + for child := n.FirstChild; child != nil; child = child.NextSibling { + output(buf, child) + } } } @@ -67,20 +63,41 @@ func (n *Node) InnerText() string { return buf.String() } -func outputXML(buf *bytes.Buffer, n *Node) { - if n.Type == TextNode { - xml.EscapeText(buf, []byte(strings.TrimSpace(n.Data))) - return +func (n *Node) sanitizedData(preserveSpaces bool) string { + if preserveSpaces { + return strings.Trim(n.Data, "\n\t") } - if n.Type == CommentNode { + return strings.TrimSpace(n.Data) +} + +func calculatePreserveSpaces(n *Node, pastValue bool) bool { + if attr := n.SelectAttr("xml:space"); attr == "preserve" { + return true + } else if attr == "default" { + return false + } + return pastValue +} + +func outputXML(buf *bytes.Buffer, n *Node, preserveSpaces bool) { + preserveSpaces = calculatePreserveSpaces(n, preserveSpaces) + switch n.Type { + case TextNode: + xml.EscapeText(buf, []byte(n.sanitizedData(preserveSpaces))) + return + case CharDataNode: + buf.WriteString("") + return + case CommentNode: buf.WriteString("") return - } - if n.Type == DeclarationNode { + case DeclarationNode: buf.WriteString("") @@ -101,7 +121,7 @@ func outputXML(buf *bytes.Buffer, n *Node) { buf.WriteString(">") } for child := n.FirstChild; child != nil; child = child.NextSibling { - outputXML(buf, child) + outputXML(buf, child, preserveSpaces) } if n.Type != DeclarationNode { if n.Prefix == "" { @@ -116,17 +136,18 @@ func outputXML(buf *bytes.Buffer, n *Node) { func (n *Node) OutputXML(self bool) string { var buf bytes.Buffer if self { - outputXML(&buf, n) + outputXML(&buf, n, false) } else { for n := n.FirstChild; n != nil; n = n.NextSibling { - outputXML(&buf, n) + outputXML(&buf, n, false) } } return buf.String() } -func addAttr(n *Node, key, val string) { +// AddAttr adds a new attribute specified by 'key' and 'val' to a node 'n'. +func AddAttr(n *Node, key, val string) { var attr xml.Attr if i := strings.Index(key, ":"); i > 0 { attr = xml.Attr{ @@ -143,10 +164,13 @@ func addAttr(n *Node, key, val string) { n.Attr = append(n.Attr, attr) } -func addChild(parent, n *Node) { +// AddChild adds a new node 'n' to a node 'parent' as its last child. +func AddChild(parent, n *Node) { n.Parent = parent + n.NextSibling = nil if parent.FirstChild == nil { parent.FirstChild = n + n.PrevSibling = nil } else { parent.LastChild.NextSibling = n n.PrevSibling = parent.LastChild @@ -155,148 +179,48 @@ func addChild(parent, n *Node) { parent.LastChild = n } -func addSibling(sibling, n *Node) { +// AddSibling adds a new node 'n' as a sibling of a given node 'sibling'. +// Note it is not necessarily true that the new node 'n' would be added +// immediately after 'sibling'. If 'sibling' isn't the last child of its +// parent, then the new node 'n' will be added at the end of the sibling +// chain of their parent. +func AddSibling(sibling, n *Node) { for t := sibling.NextSibling; t != nil; t = t.NextSibling { sibling = t } n.Parent = sibling.Parent sibling.NextSibling = n n.PrevSibling = sibling + n.NextSibling = nil if sibling.Parent != nil { sibling.Parent.LastChild = n } } -// LoadURL loads the XML document from the specified URL. 
-func LoadURL(url string) (*Node, error) { - resp, err := http.Get(url) - if err != nil { - return nil, err +// RemoveFromTree removes a node and its subtree from the document +// tree it is in. If the node is the root of the tree, then it's no-op. +func RemoveFromTree(n *Node) { + if n.Parent == nil { + return } - defer resp.Body.Close() - return parse(resp.Body) -} - -func parse(r io.Reader) (*Node, error) { - var ( - decoder = xml.NewDecoder(r) - doc = &Node{Type: DocumentNode} - space2prefix = make(map[string]string) - level = 0 - ) - // http://www.w3.org/XML/1998/namespace is bound by definition to the prefix xml. - space2prefix["http://www.w3.org/XML/1998/namespace"] = "xml" - decoder.CharsetReader = charset.NewReaderLabel - prev := doc - for { - tok, err := decoder.Token() - switch { - case err == io.EOF: - goto quit - case err != nil: - return nil, err + if n.Parent.FirstChild == n { + if n.Parent.LastChild == n { + n.Parent.FirstChild = nil + n.Parent.LastChild = nil + } else { + n.Parent.FirstChild = n.NextSibling + n.NextSibling.PrevSibling = nil } - - switch tok := tok.(type) { - case xml.StartElement: - if level == 0 { - // mising XML declaration - node := &Node{Type: DeclarationNode, Data: "xml", level: 1} - addChild(prev, node) - level = 1 - prev = node - } - // https://www.w3.org/TR/xml-names/#scoping-defaulting - for _, att := range tok.Attr { - if att.Name.Local == "xmlns" { - space2prefix[att.Value] = "" - } else if att.Name.Space == "xmlns" { - space2prefix[att.Value] = att.Name.Local - } - } - - if tok.Name.Space != "" { - if _, found := space2prefix[tok.Name.Space]; !found { - return nil, errors.New("xmlquery: invalid XML document, namespace is missing") - } - } - - for i := 0; i < len(tok.Attr); i++ { - att := &tok.Attr[i] - if prefix, ok := space2prefix[att.Name.Space]; ok { - att.Name.Space = prefix - } - } - - node := &Node{ - Type: ElementNode, - Data: tok.Name.Local, - Prefix: space2prefix[tok.Name.Space], - NamespaceURI: tok.Name.Space, - Attr: tok.Attr, - level: level, - } - //fmt.Println(fmt.Sprintf("start > %s : %d", node.Data, level)) - if level == prev.level { - addSibling(prev, node) - } else if level > prev.level { - addChild(prev, node) - } else if level < prev.level { - for i := prev.level - level; i > 1; i-- { - prev = prev.Parent - } - addSibling(prev.Parent, node) - } - prev = node - level++ - case xml.EndElement: - level-- - case xml.CharData: - node := &Node{Type: TextNode, Data: string(tok), level: level} - if level == prev.level { - addSibling(prev, node) - } else if level > prev.level { - addChild(prev, node) - } - case xml.Comment: - node := &Node{Type: CommentNode, Data: string(tok), level: level} - if level == prev.level { - addSibling(prev, node) - } else if level > prev.level { - addChild(prev, node) - } else if level < prev.level { - for i := prev.level - level; i > 1; i-- { - prev = prev.Parent - } - addSibling(prev.Parent, node) - } - case xml.ProcInst: // Processing Instruction - if prev.Type != DeclarationNode { - level++ - } - node := &Node{Type: DeclarationNode, Data: tok.Target, level: level} - pairs := strings.Split(string(tok.Inst), " ") - for _, pair := range pairs { - pair = strings.TrimSpace(pair) - if i := strings.Index(pair, "="); i > 0 { - addAttr(node, pair[:i], strings.Trim(pair[i+1:], `"`)) - } - } - if level == prev.level { - addSibling(prev, node) - } else if level > prev.level { - addChild(prev, node) - } - prev = node - case xml.Directive: + } else { + if n.Parent.LastChild == n { + n.Parent.LastChild = n.PrevSibling 
+ n.PrevSibling.NextSibling = nil + } else { + n.PrevSibling.NextSibling = n.NextSibling + n.NextSibling.PrevSibling = n.PrevSibling } - } -quit: - return doc, nil -} - -// Parse returns the parse tree for the XML from the given Reader. -func Parse(r io.Reader) (*Node, error) { - return parse(r) + n.Parent = nil + n.PrevSibling = nil + n.NextSibling = nil } diff --git a/vendor/github.com/antchfx/xmlquery/parse.go b/vendor/github.com/antchfx/xmlquery/parse.go new file mode 100644 index 0000000..623f06d --- /dev/null +++ b/vendor/github.com/antchfx/xmlquery/parse.go @@ -0,0 +1,334 @@ +package xmlquery + +import ( + "bufio" + "encoding/xml" + "errors" + "fmt" + "io" + "net/http" + "regexp" + "strings" + + "github.com/antchfx/xpath" + "golang.org/x/net/html/charset" +) + +var xmlMIMERegex = regexp.MustCompile(`(?i)((application|image|message|model)/((\w|\.|-)+\+?)?|text/)(wb)?xml`) + +// LoadURL loads the XML document from the specified URL. +func LoadURL(url string) (*Node, error) { + resp, err := http.Get(url) + if err != nil { + return nil, err + } + defer resp.Body.Close() + // Make sure the Content-Type has a valid XML MIME type + if xmlMIMERegex.MatchString(resp.Header.Get("Content-Type")) { + return Parse(resp.Body) + } + return nil, fmt.Errorf("invalid XML document(%s)", resp.Header.Get("Content-Type")) +} + +// Parse returns the parse tree for the XML from the given Reader. +func Parse(r io.Reader) (*Node, error) { + p := createParser(r) + for { + _, err := p.parse() + if err == io.EOF { + return p.doc, nil + } + if err != nil { + return nil, err + } + } +} + +type parser struct { + decoder *xml.Decoder + doc *Node + space2prefix map[string]string + level int + prev *Node + streamElementXPath *xpath.Expr // Under streaming mode, this specifies the xpath to the target element node(s). + streamElementFilter *xpath.Expr // If specified, it provides further filtering on the target element. + streamNode *Node // Need to remember the last target node So we can clean it up upon next Read() call. + streamNodePrev *Node // Need to remember target node's prev so upon target node removal, we can restore correct prev. + reader *cachedReader // Need to maintain a reference to the reader, so we can determine whether a node contains CDATA. +} + +func createParser(r io.Reader) *parser { + reader := newCachedReader(bufio.NewReader(r)) + p := &parser{ + decoder: xml.NewDecoder(reader), + doc: &Node{Type: DocumentNode}, + space2prefix: make(map[string]string), + level: 0, + reader: reader, + } + // http://www.w3.org/XML/1998/namespace is bound by definition to the prefix xml. 
+ p.space2prefix["http://www.w3.org/XML/1998/namespace"] = "xml" + p.decoder.CharsetReader = charset.NewReaderLabel + p.prev = p.doc + return p +} + +func (p *parser) parse() (*Node, error) { + var streamElementNodeCounter int + + for { + tok, err := p.decoder.Token() + if err != nil { + return nil, err + } + + switch tok := tok.(type) { + case xml.StartElement: + if p.level == 0 { + // mising XML declaration + node := &Node{Type: DeclarationNode, Data: "xml", level: 1} + AddChild(p.prev, node) + p.level = 1 + p.prev = node + } + // https://www.w3.org/TR/xml-names/#scoping-defaulting + for _, att := range tok.Attr { + if att.Name.Local == "xmlns" { + p.space2prefix[att.Value] = "" + } else if att.Name.Space == "xmlns" { + p.space2prefix[att.Value] = att.Name.Local + } + } + + if tok.Name.Space != "" { + if _, found := p.space2prefix[tok.Name.Space]; !found { + return nil, errors.New("xmlquery: invalid XML document, namespace is missing") + } + } + + for i := 0; i < len(tok.Attr); i++ { + att := &tok.Attr[i] + if prefix, ok := p.space2prefix[att.Name.Space]; ok { + att.Name.Space = prefix + } + } + + node := &Node{ + Type: ElementNode, + Data: tok.Name.Local, + Prefix: p.space2prefix[tok.Name.Space], + NamespaceURI: tok.Name.Space, + Attr: tok.Attr, + level: p.level, + } + + if p.level == p.prev.level { + AddSibling(p.prev, node) + } else if p.level > p.prev.level { + AddChild(p.prev, node) + } else if p.level < p.prev.level { + for i := p.prev.level - p.level; i > 1; i-- { + p.prev = p.prev.Parent + } + AddSibling(p.prev.Parent, node) + } + // If we're in the streaming mode, we need to remember the node if it is the target node + // so that when we finish processing the node's EndElement, we know how/what to return to + // caller. Also we need to remove the target node from the tree upon next Read() call so + // memory doesn't grow unbounded. + if p.streamElementXPath != nil { + if p.streamNode == nil { + if QuerySelector(p.doc, p.streamElementXPath) != nil { + p.streamNode = node + p.streamNodePrev = p.prev + streamElementNodeCounter = 1 + } + } else { + streamElementNodeCounter++ + } + } + p.prev = node + p.level++ + p.reader.StartCaching() + case xml.EndElement: + p.level-- + // If we're in streaming mode, and we already have a potential streaming + // target node identified (p.streamNode != nil) then we need to check if + // this is the real one we want to return to caller. + if p.streamNode != nil { + streamElementNodeCounter-- + if streamElementNodeCounter == 0 { + // Now we know this element node is the at least passing the initial + // p.streamElementXPath check and is a potential target node candidate. + // We need to have 1 more check with p.streamElementFilter (if given) to + // ensure it is really the element node we want. + // The reason we need a two-step check process is because the following + // situation: + // b1 + // And say the p.streamElementXPath = "/AAA/BBB[. != 'b1']". Now during + // xml.StartElement time, the node is still empty, so it will pass + // the p.streamElementXPath check. However, eventually we know this + // shouldn't be returned to the caller. Having a second more fine-grained + // filter check ensures that. So in this case, the caller should really + // setup the stream parser with: + // streamElementXPath = "/AAA/BBB[" + // streamElementFilter = "/AAA/BBB[. != 'b1']" + if p.streamElementFilter == nil || QuerySelector(p.doc, p.streamElementFilter) != nil { + return p.streamNode, nil + } + // otherwise, this isn't our target node, clean things up. 
+ // note we also remove the underlying *Node from the node tree, to prevent + // future stream node candidate selection error. + RemoveFromTree(p.streamNode) + p.prev = p.streamNodePrev + p.streamNode = nil + p.streamNodePrev = nil + } + } + case xml.CharData: + p.reader.StopCaching() + // First, normalize the cache... + cached := strings.ToUpper(string(p.reader.Cache())) + nodeType := TextNode + if strings.HasPrefix(cached, " p.prev.level { + AddChild(p.prev, node) + } else if p.level < p.prev.level { + for i := p.prev.level - p.level; i > 1; i-- { + p.prev = p.prev.Parent + } + AddSibling(p.prev.Parent, node) + } + p.reader.StartCaching() + case xml.Comment: + node := &Node{Type: CommentNode, Data: string(tok), level: p.level} + if p.level == p.prev.level { + AddSibling(p.prev, node) + } else if p.level > p.prev.level { + AddChild(p.prev, node) + } else if p.level < p.prev.level { + for i := p.prev.level - p.level; i > 1; i-- { + p.prev = p.prev.Parent + } + AddSibling(p.prev.Parent, node) + } + case xml.ProcInst: // Processing Instruction + if p.prev.Type != DeclarationNode { + p.level++ + } + node := &Node{Type: DeclarationNode, Data: tok.Target, level: p.level} + pairs := strings.Split(string(tok.Inst), " ") + for _, pair := range pairs { + pair = strings.TrimSpace(pair) + if i := strings.Index(pair, "="); i > 0 { + AddAttr(node, pair[:i], strings.Trim(pair[i+1:], `"`)) + } + } + if p.level == p.prev.level { + AddSibling(p.prev, node) + } else if p.level > p.prev.level { + AddChild(p.prev, node) + } + p.prev = node + case xml.Directive: + } + } +} + +// StreamParser enables loading and parsing an XML document in a streaming +// fashion. +type StreamParser struct { + p *parser +} + +// CreateStreamParser creates a StreamParser. Argument streamElementXPath is +// required. +// Argument streamElementFilter is optional and should only be used in advanced +// scenarios. +// +// Scenario 1: simple case: +// xml := `b1b2` +// sp, err := CreateStreamParser(strings.NewReader(xml), "/AAA/BBB") +// if err != nil { +// panic(err) +// } +// for { +// n, err := sp.Read() +// if err != nil { +// break +// } +// fmt.Println(n.OutputXML(true)) +// } +// Output will be: +// b1 +// b2 +// +// Scenario 2: advanced case: +// xml := `b1b2` +// sp, err := CreateStreamParser(strings.NewReader(xml), "/AAA/BBB", "/AAA/BBB[. != 'b1']") +// if err != nil { +// panic(err) +// } +// for { +// n, err := sp.Read() +// if err != nil { +// break +// } +// fmt.Println(n.OutputXML(true)) +// } +// Output will be: +// b2 +// +// As the argument names indicate, streamElementXPath should be used for +// providing xpath query pointing to the target element node only, no extra +// filtering on the element itself or its children; while streamElementFilter, +// if needed, can provide additional filtering on the target element and its +// children. +// +// CreateStreamParser returns an error if either streamElementXPath or +// streamElementFilter, if provided, cannot be successfully parsed and compiled +// into a valid xpath query. 
+func CreateStreamParser(r io.Reader, streamElementXPath string, streamElementFilter ...string) (*StreamParser, error) { + elemXPath, err := getQuery(streamElementXPath) + if err != nil { + return nil, fmt.Errorf("invalid streamElementXPath '%s', err: %s", streamElementXPath, err.Error()) + } + elemFilter := (*xpath.Expr)(nil) + if len(streamElementFilter) > 0 { + elemFilter, err = getQuery(streamElementFilter[0]) + if err != nil { + return nil, fmt.Errorf("invalid streamElementFilter '%s', err: %s", streamElementFilter[0], err.Error()) + } + } + sp := &StreamParser{ + p: createParser(r), + } + sp.p.streamElementXPath = elemXPath + sp.p.streamElementFilter = elemFilter + return sp, nil +} + +// Read returns a target node that satisfies the XPath specified by caller at +// StreamParser creation time. If there is no more satisfying target nodes after +// reading the rest of the XML document, io.EOF will be returned. At any time, +// any XML parsing error encountered will be returned, and the stream parsing +// stopped. Calling Read() after an error is returned (including io.EOF) results +// undefined behavior. Also note, due to the streaming nature, calling Read() +// will automatically remove any previous target node(s) from the document tree. +func (sp *StreamParser) Read() (*Node, error) { + // Because this is a streaming read, we need to release/remove last + // target node from the node tree to free up memory. + if sp.p.streamNode != nil { + RemoveFromTree(sp.p.streamNode) + sp.p.prev = sp.p.streamNodePrev + sp.p.streamNode = nil + sp.p.streamNodePrev = nil + } + return sp.p.parse() +} diff --git a/vendor/github.com/antchfx/xmlquery/query.go b/vendor/github.com/antchfx/xmlquery/query.go index e3a0db7..7544b7e 100644 --- a/vendor/github.com/antchfx/xmlquery/query.go +++ b/vendor/github.com/antchfx/xmlquery/query.go @@ -44,7 +44,8 @@ func (n *Node) SelectAttr(name string) string { var _ xpath.NodeNavigator = &NodeNavigator{} -// CreateXPathNavigator creates a new xpath.NodeNavigator for the specified html.Node. +// CreateXPathNavigator creates a new xpath.NodeNavigator for the specified +// XML Node. func CreateXPathNavigator(top *Node) *NodeNavigator { return &NodeNavigator{curr: top, root: top, attr: -1} } @@ -57,6 +58,7 @@ func getCurrentNode(it *xpath.NodeIterator) *Node { Data: n.Value(), } return &Node{ + Parent: n.curr, Type: AttributeNode, Data: n.LocalName(), FirstChild: childNode, @@ -66,13 +68,50 @@ func getCurrentNode(it *xpath.NodeIterator) *Node { return n.curr } -// Find searches the Node that matches by the specified XPath expr. +// Find is like QueryAll but panics if `expr` is not a valid XPath expression. +// See `QueryAll()` function. func Find(top *Node, expr string) []*Node { - exp, err := xpath.Compile(expr) + nodes, err := QueryAll(top, expr) if err != nil { panic(err) } - t := exp.Select(CreateXPathNavigator(top)) + return nodes +} + +// FindOne is like Query but panics if `expr` is not a valid XPath expression. +// See `Query()` function. +func FindOne(top *Node, expr string) *Node { + node, err := Query(top, expr) + if err != nil { + panic(err) + } + return node +} + +// QueryAll searches the XML Node that matches by the specified XPath expr. +// Returns an error if the expression `expr` cannot be parsed. 
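+//
+// A minimal usage sketch (illustrative only; `doc` is assumed to be a *Node
+// obtained from Parse or LoadURL, and the expression is just an example):
+//
+//	nodes, err := xmlquery.QueryAll(doc, "//book[@id='bk104']")
+//	if err != nil {
+//		return err // invalid XPath expression; Find() would panic here instead
+//	}
+//	for _, n := range nodes {
+//		fmt.Println(n.InnerText())
+//	}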
+func QueryAll(top *Node, expr string) ([]*Node, error) { + exp, err := getQuery(expr) + if err != nil { + return nil, err + } + return QuerySelectorAll(top, exp), nil +} + +// Query searches the XML Node that matches by the specified XPath expr, +// and returns first matched element. +func Query(top *Node, expr string) (*Node, error) { + exp, err := getQuery(expr) + if err != nil { + return nil, err + } + return QuerySelector(top, exp), nil +} + +// QuerySelectorAll searches all of the XML Node that matches the specified +// XPath selectors. +func QuerySelectorAll(top *Node, selector *xpath.Expr) []*Node { + t := selector.Select(CreateXPathNavigator(top)) var elems []*Node for t.MoveNext() { elems = append(elems, getCurrentNode(t)) @@ -80,32 +119,27 @@ func Find(top *Node, expr string) []*Node { return elems } -// FindOne searches the Node that matches by the specified XPath expr, -// and returns first element of matched. -func FindOne(top *Node, expr string) *Node { - exp, err := xpath.Compile(expr) - if err != nil { - panic(err) - } - t := exp.Select(CreateXPathNavigator(top)) - var elem *Node +// QuerySelector returns the first matched XML Node by the specified XPath +// selector. +func QuerySelector(top *Node, selector *xpath.Expr) *Node { + t := selector.Select(CreateXPathNavigator(top)) if t.MoveNext() { - elem = getCurrentNode(t) + return getCurrentNode(t) } - return elem + return nil } // FindEach searches the html.Node and calls functions cb. -// Important: this method has deprecated, recommend use for .. = range Find(){}. +// Important: this method is deprecated, instead, use for .. = range Find(){}. func FindEach(top *Node, expr string, cb func(int, *Node)) { for i, n := range Find(top, expr) { cb(i, n) } } -// FindEachWithBreak functions the same as FindEach but allows you -// to break the loop by returning false from your callback function, cb. -// Important: this method has deprecated, recommend use for .. = range Find(){}. +// FindEachWithBreak functions the same as FindEach but allows to break the loop +// by returning false from the callback function `cb`. +// Important: this method is deprecated, instead, use .. = range Find(){}. func FindEachWithBreak(top *Node, expr string, cb func(int, *Node) bool) { for i, n := range Find(top, expr) { if !cb(i, n) { @@ -127,7 +161,7 @@ func (x *NodeNavigator) NodeType() xpath.NodeType { switch x.curr.Type { case CommentNode: return xpath.CommentNode - case TextNode: + case TextNode, CharDataNode: return xpath.TextNode case DeclarationNode, DocumentNode: return xpath.RootNode @@ -158,6 +192,10 @@ func (x *NodeNavigator) Prefix() string { return x.curr.Prefix } +func (x *NodeNavigator) NamespaceURL() string { + return x.curr.NamespaceURI +} + func (x *NodeNavigator) Value() string { switch x.curr.Type { case CommentNode: diff --git a/vendor/github.com/antchfx/xpath/README.md b/vendor/github.com/antchfx/xpath/README.md index 414114d..9a58a9f 100644 --- a/vendor/github.com/antchfx/xpath/README.md +++ b/vendor/github.com/antchfx/xpath/README.md @@ -138,12 +138,15 @@ Supported Features `lang()`| ✗ | `last()`| ✓ | `local-name()`| ✓ | +`matches()`| ✓ | `name()`| ✓ | `namespace-uri()`| ✓ | `normalize-space()`| ✓ | `not()`| ✓ | `number()`| ✓ | `position()`| ✓ | +`replace()`| ✓ | +`reverse()`| ✓ | `round()`| ✓ | `starts-with()`| ✓ | `string()`| ✓ | @@ -160,6 +163,9 @@ Supported Features Changelogs === +2019-03-19 +- optimize XPath `|` operation performance. [#33](https://github.com/antchfx/xpath/issues/33). 
Tips: suggest split into multiple subquery if you have a lot of `|` operations. + 2019-01-29 - improvement `normalize-space` function. [#32](https://github.com/antchfx/xpath/issues/32) diff --git a/vendor/github.com/antchfx/xpath/build.go b/vendor/github.com/antchfx/xpath/build.go index 74f266b..2edafb8 100644 --- a/vendor/github.com/antchfx/xpath/build.go +++ b/vendor/github.com/antchfx/xpath/build.go @@ -77,7 +77,18 @@ func (b *builder) processAxisNode(root *axisNode) (query, error) { } else { qyGrandInput = &contextQuery{} } - qyOutput = &descendantQuery{Input: qyGrandInput, Predicate: predicate, Self: true} + // fix #20: https://github.com/antchfx/htmlquery/issues/20 + filter := func(n NodeNavigator) bool { + v := predicate(n) + switch root.Prop { + case "text": + v = v && n.NodeType() == TextNode + case "comment": + v = v && n.NodeType() == CommentNode + } + return v + } + qyOutput = &descendantQuery{Input: qyGrandInput, Predicate: filter, Self: true} return qyOutput, nil } } @@ -182,8 +193,23 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) { if err != nil { return nil, err } - qyOutput = &functionQuery{Input: b.firstInput, Func: containsFunc(arg1, arg2)} + case "matches": + //matches(string , pattern) + if len(root.Args) != 2 { + return nil, errors.New("xpath: matches function must have two parameters") + } + var ( + arg1, arg2 query + err error + ) + if arg1, err = b.processNode(root.Args[0]); err != nil { + return nil, err + } + if arg2, err = b.processNode(root.Args[1]); err != nil { + return nil, err + } + qyOutput = &functionQuery{Input: b.firstInput, Func: matchesFunc(arg1, arg2)} case "substring": //substring( string , start [, length] ) if len(root.Args) < 2 { @@ -243,6 +269,25 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) { return nil, err } qyOutput = &functionQuery{Input: argQuery, Func: normalizespaceFunc} + case "replace": + //replace( string , string, string ) + if len(root.Args) != 3 { + return nil, errors.New("xpath: replace function must have three parameters") + } + var ( + arg1, arg2, arg3 query + err error + ) + if arg1, err = b.processNode(root.Args[0]); err != nil { + return nil, err + } + if arg2, err = b.processNode(root.Args[1]); err != nil { + return nil, err + } + if arg3, err = b.processNode(root.Args[2]); err != nil { + return nil, err + } + qyOutput = &functionQuery{Input: b.firstInput, Func: replaceFunc(arg1, arg2, arg3)} case "translate": //translate( string , string, string ) if len(root.Args) != 3 { @@ -272,27 +317,27 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) { } qyOutput = &functionQuery{Input: argQuery, Func: notFunc} case "name", "local-name", "namespace-uri": - inp := b.firstInput if len(root.Args) > 1 { return nil, fmt.Errorf("xpath: %s function must have at most one parameter", root.FuncName) } + var ( + arg query + err error + ) if len(root.Args) == 1 { - argQuery, err := b.processNode(root.Args[0]) + arg, err = b.processNode(root.Args[0]) if err != nil { return nil, err } - inp = argQuery } - f := &functionQuery{Input: inp} switch root.FuncName { case "name": - f.Func = nameFunc + qyOutput = &functionQuery{Input: b.firstInput, Func: nameFunc(arg)} case "local-name": - f.Func = localNameFunc + qyOutput = &functionQuery{Input: b.firstInput, Func: localNameFunc(arg)} case "namespace-uri": - f.Func = namespaceFunc + qyOutput = &functionQuery{Input: b.firstInput, Func: namespaceFunc(arg)} } - qyOutput = f case "true", "false": val := root.FuncName == "true" 
qyOutput = &functionQuery{ @@ -379,6 +424,15 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) { args = append(args, q) } qyOutput = &functionQuery{Input: b.firstInput, Func: concatFunc(args...)} + case "reverse": + if len(root.Args) == 0 { + return nil, fmt.Errorf("xpath: reverse(node-sets) function must with have parameters node-sets") + } + argQuery, err := b.processNode(root.Args[0]) + if err != nil { + return nil, err + } + qyOutput = &transformFunctionQuery{Input: argQuery, Func: reverseFunc} default: return nil, fmt.Errorf("not yet support this function %s()", root.FuncName) } @@ -396,13 +450,15 @@ func (b *builder) processOperatorNode(root *operatorNode) (query, error) { } var qyOutput query switch root.Op { - case "+", "-", "div", "mod": // Numeric operator + case "+", "-", "*", "div", "mod": // Numeric operator var exprFunc func(interface{}, interface{}) interface{} switch root.Op { case "+": exprFunc = plusFunc case "-": exprFunc = minusFunc + case "*": + exprFunc = mulFunc case "div": exprFunc = divFunc case "mod": diff --git a/vendor/github.com/antchfx/xpath/cache.go b/vendor/github.com/antchfx/xpath/cache.go new file mode 100644 index 0000000..31a2b33 --- /dev/null +++ b/vendor/github.com/antchfx/xpath/cache.go @@ -0,0 +1,80 @@ +package xpath + +import ( + "regexp" + "sync" +) + +type loadFunc func(key interface{}) (interface{}, error) + +const ( + defaultCap = 65536 +) + +// The reason we're building a simple capacity-resetting loading cache (when capacity reached) instead of using +// something like github.com/hashicorp/golang-lru is primarily due to (not wanting to create) external dependency. +// Currently this library has 0 external dep (other than go sdk), and supports go 1.6, 1.9, and 1.10 (and later). +// Creating external lib dependencies (plus their transitive dependencies) would make things hard if not impossible. +// We expect under most circumstances, the defaultCap is big enough for any long running services that use this +// library if their xpath regexp cardinality is low. However, in extreme cases when the capacity is reached, we +// simply reset the cache, taking a small subsequent perf hit (next to nothing considering amortization) in trade +// of more complex and less performant LRU type of construct. +type loadingCache struct { + sync.RWMutex + cap int + load loadFunc + m map[interface{}]interface{} + reset int +} + +// NewLoadingCache creates a new instance of a loading cache with capacity. Capacity must be >= 0, or +// it will panic. Capacity == 0 means the cache growth is unbounded. +func NewLoadingCache(load loadFunc, capacity int) *loadingCache { + if capacity < 0 { + panic("capacity must be >= 0") + } + return &loadingCache{cap: capacity, load: load, m: make(map[interface{}]interface{})} +} + +func (c *loadingCache) get(key interface{}) (interface{}, error) { + c.RLock() + v, found := c.m[key] + c.RUnlock() + if found { + return v, nil + } + v, err := c.load(key) + if err != nil { + return nil, err + } + c.Lock() + if c.cap > 0 && len(c.m) >= c.cap { + c.m = map[interface{}]interface{}{key: v} + c.reset++ + } else { + c.m[key] = v + } + c.Unlock() + return v, nil +} + +var ( + // RegexpCache is a loading cache for string -> *regexp.Regexp mapping. It is exported so that in rare cases + // client can customize load func and/or capacity. 
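+	// For example, a client could swap in a larger cache (sketch only; the
+	// capacity value below is an arbitrary assumption, not a recommendation):
+	//
+	//	xpath.RegexpCache = xpath.NewLoadingCache(
+	//		func(key interface{}) (interface{}, error) { return regexp.Compile(key.(string)) },
+	//		1<<20)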
+ RegexpCache = defaultRegexpCache() +) + +func defaultRegexpCache() *loadingCache { + return NewLoadingCache( + func(key interface{}) (interface{}, error) { + return regexp.Compile(key.(string)) + }, defaultCap) +} + +func getRegexp(pattern string) (*regexp.Regexp, error) { + exp, err := RegexpCache.get(pattern) + if err != nil { + return nil, err + } + return exp.(*regexp.Regexp), nil +} diff --git a/vendor/github.com/antchfx/xpath/func.go b/vendor/github.com/antchfx/xpath/func.go index 3c0fde9..fd4187b 100644 --- a/vendor/github.com/antchfx/xpath/func.go +++ b/vendor/github.com/antchfx/xpath/func.go @@ -4,11 +4,26 @@ import ( "errors" "fmt" "math" - "regexp" "strconv" "strings" + "sync" + "unicode" ) +// Defined an interface of stringBuilder that compatible with +// strings.Builder(go 1.10) and bytes.Buffer(< go 1.10) +type stringBuilder interface { + WriteRune(r rune) (n int, err error) + WriteString(s string) (int, error) + Reset() + Grow(n int) + String() string +} + +var builderPool = sync.Pool{New: func() interface{} { + return newStringBuilder() +}} + // The XPath function list. func predicate(q query) func(NodeNavigator) bool { @@ -25,7 +40,7 @@ func predicate(q query) func(NodeNavigator) bool { func positionFunc(q query, t iterator) interface{} { var ( count = 1 - node = t.Current() + node = t.Current().Copy() ) test := predicate(q) for node.MoveToPrevious() { @@ -40,7 +55,7 @@ func positionFunc(q query, t iterator) interface{} { func lastFunc(q query, t iterator) interface{} { var ( count = 0 - node = t.Current() + node = t.Current().Copy() ) node.MoveToFirst() test := predicate(q) @@ -58,6 +73,7 @@ func lastFunc(q query, t iterator) interface{} { // countFunc is a XPath Node Set functions count(node-set). func countFunc(q query, t iterator) interface{} { var count = 0 + q = functionArgs(q) test := predicate(q) switch typ := q.Evaluate(t).(type) { case query: @@ -73,7 +89,7 @@ func countFunc(q query, t iterator) interface{} { // sumFunc is a XPath Node Set functions sum(node-set). func sumFunc(q query, t iterator) interface{} { var sum float64 - switch typ := q.Evaluate(t).(type) { + switch typ := functionArgs(q).Evaluate(t).(type) { case query: for node := typ.Select(t); node != nil; node = typ.Select(t) { if v, err := strconv.ParseFloat(node.Value(), 64); err == nil { @@ -116,52 +132,82 @@ func asNumber(t iterator, o interface{}) float64 { // ceilingFunc is a XPath Node Set functions ceiling(node-set). func ceilingFunc(q query, t iterator) interface{} { - val := asNumber(t, q.Evaluate(t)) + val := asNumber(t, functionArgs(q).Evaluate(t)) return math.Ceil(val) } // floorFunc is a XPath Node Set functions floor(node-set). func floorFunc(q query, t iterator) interface{} { - val := asNumber(t, q.Evaluate(t)) + val := asNumber(t, functionArgs(q).Evaluate(t)) return math.Floor(val) } // roundFunc is a XPath Node Set functions round(node-set). func roundFunc(q query, t iterator) interface{} { - val := asNumber(t, q.Evaluate(t)) + val := asNumber(t, functionArgs(q).Evaluate(t)) //return math.Round(val) return round(val) } // nameFunc is a XPath functions name([node-set]). 
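// For example (illustrative, standard XPath semantics rather than anything
// library-specific): name() on an element <x:book> returns "x:book", while
// local-name() returns "book" and namespace-uri() returns the URI bound to
// the "x" prefix.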
-func nameFunc(q query, t iterator) interface{} { - v := q.Select(t) - if v == nil { - return "" +func nameFunc(arg query) func(query, iterator) interface{} { + return func(q query, t iterator) interface{} { + var v NodeNavigator + if arg == nil { + v = t.Current() + } else { + v = arg.Clone().Select(t) + if v == nil { + return "" + } + } + ns := v.Prefix() + if ns == "" { + return v.LocalName() + } + return ns + ":" + v.LocalName() } - ns := v.Prefix() - if ns == "" { - return v.LocalName() - } - return ns + ":" + v.LocalName() } // localNameFunc is a XPath functions local-name([node-set]). -func localNameFunc(q query, t iterator) interface{} { - v := q.Select(t) - if v == nil { - return "" +func localNameFunc(arg query) func(query, iterator) interface{} { + return func(q query, t iterator) interface{} { + var v NodeNavigator + if arg == nil { + v = t.Current() + } else { + v = arg.Clone().Select(t) + if v == nil { + return "" + } + } + return v.LocalName() } - return v.LocalName() } // namespaceFunc is a XPath functions namespace-uri([node-set]). -func namespaceFunc(q query, t iterator) interface{} { - v := q.Select(t) - if v == nil { - return "" +func namespaceFunc(arg query) func(query, iterator) interface{} { + return func(q query, t iterator) interface{} { + var v NodeNavigator + if arg == nil { + v = t.Current() + } else { + // Get the first node in the node-set if specified. + v = arg.Clone().Select(t) + if v == nil { + return "" + } + } + // fix about namespace-uri() bug: https://github.com/antchfx/xmlquery/issues/22 + // TODO: In the next version, add NamespaceURL() to the NodeNavigator interface. + type namespaceURL interface { + NamespaceURL() string + } + if f, ok := v.(namespaceURL); ok { + return f.NamespaceURL() + } + return v.Prefix() } - return v.Prefix() } func asBool(t iterator, v interface{}) bool { @@ -171,7 +217,7 @@ func asBool(t iterator, v interface{}) bool { case *NodeIterator: return v.MoveNext() case bool: - return bool(v) + return v case float64: return v != 0 case string: @@ -209,19 +255,19 @@ func asString(t iterator, v interface{}) string { // booleanFunc is a XPath functions boolean([node-set]). func booleanFunc(q query, t iterator) interface{} { - v := q.Evaluate(t) + v := functionArgs(q).Evaluate(t) return asBool(t, v) } // numberFunc is a XPath functions number([node-set]). func numberFunc(q query, t iterator) interface{} { - v := q.Evaluate(t) + v := functionArgs(q).Evaluate(t) return asNumber(t, v) } // stringFunc is a XPath functions string([node-set]). 
func stringFunc(q query, t iterator) interface{} { - v := q.Evaluate(t) + v := functionArgs(q).Evaluate(t) return asString(t, v) } @@ -232,7 +278,7 @@ func startwithFunc(arg1, arg2 query) func(query, iterator) interface{} { m, n string ok bool ) - switch typ := arg1.Evaluate(t).(type) { + switch typ := functionArgs(arg1).Evaluate(t).(type) { case string: m = typ case query: @@ -244,7 +290,7 @@ func startwithFunc(arg1, arg2 query) func(query, iterator) interface{} { default: panic(errors.New("starts-with() function argument type must be string")) } - n, ok = arg2.Evaluate(t).(string) + n, ok = functionArgs(arg2).Evaluate(t).(string) if !ok { panic(errors.New("starts-with() function argument type must be string")) } @@ -259,7 +305,7 @@ func endwithFunc(arg1, arg2 query) func(query, iterator) interface{} { m, n string ok bool ) - switch typ := arg1.Evaluate(t).(type) { + switch typ := functionArgs(arg1).Evaluate(t).(type) { case string: m = typ case query: @@ -271,7 +317,7 @@ func endwithFunc(arg1, arg2 query) func(query, iterator) interface{} { default: panic(errors.New("ends-with() function argument type must be string")) } - n, ok = arg2.Evaluate(t).(string) + n, ok = functionArgs(arg2).Evaluate(t).(string) if !ok { panic(errors.New("ends-with() function argument type must be string")) } @@ -286,8 +332,7 @@ func containsFunc(arg1, arg2 query) func(query, iterator) interface{} { m, n string ok bool ) - - switch typ := arg1.Evaluate(t).(type) { + switch typ := functionArgs(arg1).Evaluate(t).(type) { case string: m = typ case query: @@ -300,7 +345,7 @@ func containsFunc(arg1, arg2 query) func(query, iterator) interface{} { panic(errors.New("contains() function argument type must be string")) } - n, ok = arg2.Evaluate(t).(string) + n, ok = functionArgs(arg2).Evaluate(t).(string) if !ok { panic(errors.New("contains() function argument type must be string")) } @@ -309,15 +354,39 @@ func containsFunc(arg1, arg2 query) func(query, iterator) interface{} { } } -var ( - regnewline = regexp.MustCompile(`[\r\n\t]`) - regseqspace = regexp.MustCompile(`\s{2,}`) -) +// matchesFunc is an XPath function that tests a given string against a regexp pattern. +// Note: does not support https://www.w3.org/TR/xpath-functions-31/#func-matches 3rd optional `flags` argument; if +// needed, directly put flags in the regexp pattern, such as `(?i)^pattern$` for `i` flag. +func matchesFunc(arg1, arg2 query) func(query, iterator) interface{} { + return func(q query, t iterator) interface{} { + var s string + switch typ := functionArgs(arg1).Evaluate(t).(type) { + case string: + s = typ + case query: + node := typ.Select(t) + if node == nil { + return "" + } + s = node.Value() + } + var pattern string + var ok bool + if pattern, ok = functionArgs(arg2).Evaluate(t).(string); !ok { + panic(errors.New("matches() function second argument type must be string")) + } + re, err := getRegexp(pattern) + if err != nil { + panic(fmt.Errorf("matches() function second argument is not a valid regexp pattern, err: %s", err.Error())) + } + return re.MatchString(s) + } +} // normalizespaceFunc is XPath functions normalize-space(string?) 
func normalizespaceFunc(q query, t iterator) interface{} { var m string - switch typ := q.Evaluate(t).(type) { + switch typ := functionArgs(q).Evaluate(t).(type) { case string: m = typ case query: @@ -327,17 +396,33 @@ func normalizespaceFunc(q query, t iterator) interface{} { } m = node.Value() } - m = strings.TrimSpace(m) - m = regnewline.ReplaceAllString(m, " ") - m = regseqspace.ReplaceAllString(m, " ") - return m + var b = builderPool.Get().(stringBuilder) + b.Grow(len(m)) + + runeStr := []rune(strings.TrimSpace(m)) + l := len(runeStr) + for i := range runeStr { + r := runeStr[i] + isSpace := unicode.IsSpace(r) + if !(isSpace && (i+1 < l && unicode.IsSpace(runeStr[i+1]))) { + if isSpace { + r = ' ' + } + b.WriteRune(r) + } + } + result := b.String() + b.Reset() + builderPool.Put(b) + + return result } // substringFunc is XPath functions substring function returns a part of a given string. func substringFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { var m string - switch typ := arg1.Evaluate(t).(type) { + switch typ := functionArgs(arg1).Evaluate(t).(type) { case string: m = typ case query: @@ -351,14 +436,14 @@ func substringFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { var start, length float64 var ok bool - if start, ok = arg2.Evaluate(t).(float64); !ok { + if start, ok = functionArgs(arg2).Evaluate(t).(float64); !ok { panic(errors.New("substring() function first argument type must be int")) } else if start < 1 { panic(errors.New("substring() function first argument type must be >= 1")) } start-- if arg3 != nil { - if length, ok = arg3.Evaluate(t).(float64); !ok { + if length, ok = functionArgs(arg3).Evaluate(t).(float64); !ok { panic(errors.New("substring() function second argument type must be int")) } } @@ -376,7 +461,7 @@ func substringFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { func substringIndFunc(arg1, arg2 query, after bool) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { var str string - switch v := arg1.Evaluate(t).(type) { + switch v := functionArgs(arg1).Evaluate(t).(type) { case string: str = v case query: @@ -387,7 +472,7 @@ func substringIndFunc(arg1, arg2 query, after bool) func(query, iterator) interf str = node.Value() } var word string - switch v := arg2.Evaluate(t).(type) { + switch v := functionArgs(arg2).Evaluate(t).(type) { case string: word = v case query: @@ -416,7 +501,7 @@ func substringIndFunc(arg1, arg2 query, after bool) func(query, iterator) interf // equal to the number of characters in a given string. func stringLengthFunc(arg1 query) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { - switch v := arg1.Evaluate(t).(type) { + switch v := functionArgs(arg1).Evaluate(t).(type) { case string: return float64(len(v)) case query: @@ -433,11 +518,11 @@ func stringLengthFunc(arg1 query) func(query, iterator) interface{} { // translateFunc is XPath functions translate() function returns a replaced string. 
func translateFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { - str := asString(t, arg1.Evaluate(t)) - src := asString(t, arg2.Evaluate(t)) - dst := asString(t, arg3.Evaluate(t)) + str := asString(t, functionArgs(arg1).Evaluate(t)) + src := asString(t, functionArgs(arg2).Evaluate(t)) + dst := asString(t, functionArgs(arg3).Evaluate(t)) - var replace []string + replace := make([]string, 0, len(src)) for i, s := range src { d := "" if i < len(dst) { @@ -449,9 +534,20 @@ func translateFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { } } +// replaceFunc is XPath functions replace() function returns a replaced string. +func replaceFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} { + return func(q query, t iterator) interface{} { + str := asString(t, functionArgs(arg1).Evaluate(t)) + src := asString(t, functionArgs(arg2).Evaluate(t)) + dst := asString(t, functionArgs(arg3).Evaluate(t)) + + return strings.Replace(str, src, dst, -1) + } +} + // notFunc is XPATH functions not(expression) function operation. func notFunc(q query, t iterator) interface{} { - switch v := q.Evaluate(t).(type) { + switch v := functionArgs(q).Evaluate(t).(type) { case bool: return !v case query: @@ -467,18 +563,52 @@ func notFunc(q query, t iterator) interface{} { // concat( string1 , string2 [, stringn]* ) func concatFunc(args ...query) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { - var a []string + b := builderPool.Get().(stringBuilder) for _, v := range args { + v = functionArgs(v) + switch v := v.Evaluate(t).(type) { case string: - a = append(a, v) + b.WriteString(v) case query: node := v.Select(t) if node != nil { - a = append(a, node.Value()) + b.WriteString(node.Value()) } } } - return strings.Join(a, "") + result := b.String() + b.Reset() + builderPool.Put(b) + + return result + } +} + +// https://github.com/antchfx/xpath/issues/43 +func functionArgs(q query) query { + if _, ok := q.(*functionQuery); ok { + return q + } + return q.Clone() +} + +func reverseFunc(q query, t iterator) func() NodeNavigator { + var list []NodeNavigator + for { + node := q.Select(t) + if node == nil { + break + } + list = append(list, node.Copy()) + } + i := len(list) + return func() NodeNavigator { + if i <= 0 { + return nil + } + i-- + node := list[i] + return node } } diff --git a/vendor/github.com/antchfx/xpath/func_go110.go b/vendor/github.com/antchfx/xpath/func_go110.go index 500880f..6df30d3 100644 --- a/vendor/github.com/antchfx/xpath/func_go110.go +++ b/vendor/github.com/antchfx/xpath/func_go110.go @@ -2,8 +2,15 @@ package xpath -import "math" +import ( + "math" + "strings" +) func round(f float64) int { return int(math.Round(f)) } + +func newStringBuilder() stringBuilder{ + return &strings.Builder{} +} diff --git a/vendor/github.com/antchfx/xpath/func_pre_go110.go b/vendor/github.com/antchfx/xpath/func_pre_go110.go index 043616b..335141f 100644 --- a/vendor/github.com/antchfx/xpath/func_pre_go110.go +++ b/vendor/github.com/antchfx/xpath/func_pre_go110.go @@ -2,7 +2,10 @@ package xpath -import "math" +import ( + "bytes" + "math" +) // math.Round() is supported by Go 1.10+, // This method just compatible for version <1.10. 
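The hunks above add replace() alongside the matches() support shown earlier in this file; in this implementation replace() does a plain, non-regexp string substitution. A short sketch against xmlquery, which the same commit updates; the XML, element names and patterns are illustrative only, and matches() takes its regexp flags inline (e.g. (?i)) since the optional flags argument is not supported.

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
	"github.com/antchfx/xpath"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(
		`<prices><fuel name="SP95">1.42</fuel><fuel name="Gazole">1.31</fuel></prices>`))
	if err != nil {
		panic(err)
	}

	// matches(): regexp test against @name, case-insensitive via an inline flag.
	for _, n := range xmlquery.Find(doc, `//fuel[matches(@name, '(?i)^sp')]`) {
		fmt.Println(n.InnerText()) // expected: 1.42
	}

	// replace(): literal string replacement, evaluated as a scalar expression.
	nav := xmlquery.CreateXPathNavigator(doc)
	v := xpath.MustCompile(`replace(//fuel[@name="Gazole"], '.', ',')`).Evaluate(nav)
	fmt.Println(v) // expected: 1,31
}
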
@@ -13,3 +16,7 @@ func round(f float64) int { } return int(f + math.Copysign(0.5, f)) } + +func newStringBuilder() stringBuilder { + return &bytes.Buffer{} +} diff --git a/vendor/github.com/antchfx/xpath/go.mod b/vendor/github.com/antchfx/xpath/go.mod new file mode 100644 index 0000000..6745c56 --- /dev/null +++ b/vendor/github.com/antchfx/xpath/go.mod @@ -0,0 +1,3 @@ +module github.com/antchfx/xpath + +go 1.14 diff --git a/vendor/github.com/antchfx/xpath/operator.go b/vendor/github.com/antchfx/xpath/operator.go index 308d3cb..8c2f31f 100644 --- a/vendor/github.com/antchfx/xpath/operator.go +++ b/vendor/github.com/antchfx/xpath/operator.go @@ -163,7 +163,17 @@ func cmpNodeSetString(t iterator, op string, m, n interface{}) bool { } func cmpNodeSetNodeSet(t iterator, op string, m, n interface{}) bool { - return false + a := m.(query) + b := n.(query) + x := a.Select(t) + if x == nil { + return false + } + y := b.Select(t) + if y == nil { + return false + } + return cmpStringStringF(op, x.Value(), y.Value()) } func cmpStringNumeric(t iterator, op string, m, n interface{}) bool { diff --git a/vendor/github.com/antchfx/xpath/query.go b/vendor/github.com/antchfx/xpath/query.go index 333fe09..47f8076 100644 --- a/vendor/github.com/antchfx/xpath/query.go +++ b/vendor/github.com/antchfx/xpath/query.go @@ -22,6 +22,17 @@ type query interface { Clone() query } +// nopQuery is an empty query that always return nil for any query. +type nopQuery struct { + query +} + +func (nopQuery) Select(iterator) NodeNavigator { return nil } + +func (nopQuery) Evaluate(iterator) interface{} { return nil } + +func (nopQuery) Clone() query { return nopQuery{} } + // contextQuery is returns current node on the iterator object query. type contextQuery struct { count int @@ -65,6 +76,7 @@ func (a *ancestorQuery) Select(t iterator) NodeNavigator { return nil } first := true + node = node.Copy() a.iterator = func() NodeNavigator { if first && a.Self { first = false @@ -216,6 +228,7 @@ func (c *childQuery) position() int { type descendantQuery struct { iterator func() NodeNavigator posit int + level int Self bool Input query @@ -231,32 +244,38 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator { return nil } node = node.Copy() - level := 0 + d.level = 0 + positmap := make(map[int]int) first := true d.iterator = func() NodeNavigator { if first && d.Self { first = false if d.Predicate(node) { + d.posit = 1 + positmap[d.level] = 1 return node } } for { if node.MoveToChild() { - level++ + d.level = d.level + 1 + positmap[d.level] = 0 } else { for { - if level == 0 { + if d.level == 0 { return nil } if node.MoveToNext() { break } node.MoveToParent() - level-- + d.level = d.level - 1 } } if d.Predicate(node) { + positmap[d.level]++ + d.posit = positmap[d.level] return node } } @@ -264,7 +283,6 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator { } if node := d.iterator(); node != nil { - d.posit++ return node } d.iterator = nil @@ -286,12 +304,17 @@ func (d *descendantQuery) position() int { return d.posit } +func (d *descendantQuery) depth() int { + return d.level +} + func (d *descendantQuery) Clone() query { return &descendantQuery{Self: d.Self, Input: d.Input.Clone(), Predicate: d.Predicate} } // followingQuery is an XPath following node query.(following::*|following-sibling::*) type followingQuery struct { + posit int iterator func() NodeNavigator Input query @@ -302,6 +325,7 @@ type followingQuery struct { func (f *followingQuery) Select(t iterator) NodeNavigator { for { if f.iterator == nil { + 
f.posit = 0 node := f.Input.Select(t) if node == nil { return nil @@ -314,12 +338,13 @@ func (f *followingQuery) Select(t iterator) NodeNavigator { return nil } if f.Predicate(node) { + f.posit++ return node } } } } else { - var q query // descendant query + var q *descendantQuery // descendant query f.iterator = func() NodeNavigator { for { if q == nil { @@ -336,6 +361,7 @@ func (f *followingQuery) Select(t iterator) NodeNavigator { t.Current().MoveTo(node) } if node := q.Select(t); node != nil { + f.posit = q.posit return node } q = nil @@ -364,9 +390,14 @@ func (f *followingQuery) Clone() query { return &followingQuery{Input: f.Input.Clone(), Sibling: f.Sibling, Predicate: f.Predicate} } +func (f *followingQuery) position() int { + return f.posit +} + // precedingQuery is an XPath preceding node query.(preceding::*) type precedingQuery struct { iterator func() NodeNavigator + posit int Input query Sibling bool // The matching sibling node of current node. Predicate func(NodeNavigator) bool @@ -375,6 +406,7 @@ type precedingQuery struct { func (p *precedingQuery) Select(t iterator) NodeNavigator { for { if p.iterator == nil { + p.posit = 0 node := p.Input.Select(t) if node == nil { return nil @@ -387,6 +419,7 @@ func (p *precedingQuery) Select(t iterator) NodeNavigator { return nil } if p.Predicate(node) { + p.posit++ return node } } @@ -400,6 +433,7 @@ func (p *precedingQuery) Select(t iterator) NodeNavigator { if !node.MoveToParent() { return nil } + p.posit = 0 } q = &descendantQuery{ Self: true, @@ -409,6 +443,7 @@ func (p *precedingQuery) Select(t iterator) NodeNavigator { t.Current().MoveTo(node) } if node := q.Select(t); node != nil { + p.posit++ return node } q = nil @@ -436,6 +471,10 @@ func (p *precedingQuery) Clone() query { return &precedingQuery{Input: p.Input.Clone(), Sibling: p.Sibling, Predicate: p.Predicate} } +func (p *precedingQuery) position() int { + return p.posit +} + // parentQuery is an XPath parent node query.(parent::*) type parentQuery struct { Input query @@ -504,6 +543,8 @@ func (s *selfQuery) Clone() query { type filterQuery struct { Input query Predicate query + posit int + positmap map[int]int } func (f *filterQuery) do(t iterator) bool { @@ -514,8 +555,8 @@ func (f *filterQuery) do(t iterator) bool { case reflect.String: return len(val.String()) > 0 case reflect.Float64: - pt := float64(getNodePosition(f.Input)) - return int(val.Float()) == int(pt) + pt := getNodePosition(f.Input) + return int(val.Float()) == pt default: if q, ok := f.Predicate.(query); ok { return q.Select(t) != nil @@ -524,17 +565,29 @@ func (f *filterQuery) do(t iterator) bool { return false } +func (f *filterQuery) position() int { + return f.posit +} + func (f *filterQuery) Select(t iterator) NodeNavigator { + if f.positmap == nil { + f.positmap = make(map[int]int) + } for { + node := f.Input.Select(t) if node == nil { return node } node = node.Copy() - //fmt.Println(node.LocalName()) t.Current().MoveTo(node) if f.do(t) { + // fix https://github.com/antchfx/htmlquery/issues/26 + // Calculate and keep the each of matching node's position in the same depth. + level := getNodeDepth(f.Input) + f.positmap[level]++ + f.posit = f.positmap[level] return node } } @@ -549,8 +602,9 @@ func (f *filterQuery) Clone() query { return &filterQuery{Input: f.Input.Clone(), Predicate: f.Predicate.Clone()} } -// functionQuery is an XPath function that call a function to returns -// value of current NodeNavigator node. 
+// functionQuery is an XPath function that returns a computed value for +// the Evaluate call of the current NodeNavigator node. Select call isn't +// applicable for functionQuery. type functionQuery struct { Input query // Node Set Func func(query, iterator) interface{} // The xpath function. @@ -570,6 +624,34 @@ func (f *functionQuery) Clone() query { return &functionQuery{Input: f.Input.Clone(), Func: f.Func} } +// transformFunctionQuery diffs from functionQuery where the latter computes a scalar +// value (number,string,boolean) for the current NodeNavigator node while the former +// (transformFunctionQuery) performs a mapping or transform of the current NodeNavigator +// and returns a new NodeNavigator. It is used for non-scalar XPath functions such as +// reverse(), remove(), subsequence(), unordered(), etc. +type transformFunctionQuery struct { + Input query + Func func(query, iterator) func() NodeNavigator + iterator func() NodeNavigator +} + +func (f *transformFunctionQuery) Select(t iterator) NodeNavigator { + if f.iterator == nil { + f.iterator = f.Func(f.Input, t) + } + return f.iterator() +} + +func (f *transformFunctionQuery) Evaluate(t iterator) interface{} { + f.Input.Evaluate(t) + f.iterator = nil + return f +} + +func (f *transformFunctionQuery) Clone() query { + return &transformFunctionQuery{Input: f.Input.Clone(), Func: f.Func} +} + // constantQuery is an XPath constant operand. type constantQuery struct { Val interface{} @@ -731,7 +813,8 @@ type unionQuery struct { func (u *unionQuery) Select(t iterator) NodeNavigator { if u.iterator == nil { - var m = make(map[uint64]NodeNavigator) + var list []NodeNavigator + var m = make(map[uint64]bool) root := t.Current().Copy() for { node := u.Left.Select(t) @@ -740,7 +823,8 @@ func (u *unionQuery) Select(t iterator) NodeNavigator { } code := getHashCode(node.Copy()) if _, ok := m[code]; !ok { - m[code] = node.Copy() + m[code] = true + list = append(list, node.Copy()) } } t.Current().MoveTo(root) @@ -751,16 +835,11 @@ func (u *unionQuery) Select(t iterator) NodeNavigator { } code := getHashCode(node.Copy()) if _, ok := m[code]; !ok { - m[code] = node.Copy() + m[code] = true + list = append(list, node.Copy()) } } - list := make([]NodeNavigator, len(m)) var i int - for _, v := range m { - list[i] = v - i++ - } - i = 0 u.iterator = func() NodeNavigator { if i >= len(list) { return nil @@ -789,8 +868,18 @@ func getHashCode(n NodeNavigator) uint64 { switch n.NodeType() { case AttributeNode, TextNode, CommentNode: sb.WriteString(fmt.Sprintf("%s=%s", n.LocalName(), n.Value())) - if n.MoveToParent() { - sb.WriteString(n.LocalName()) + // https://github.com/antchfx/htmlquery/issues/25 + d := 1 + for n.MoveToPrevious() { + d++ + } + sb.WriteString(fmt.Sprintf("-%d", d)) + for n.MoveToParent() { + d = 1 + for n.MoveToPrevious() { + d++ + } + sb.WriteString(fmt.Sprintf("-%d", d)) } case ElementNode: sb.WriteString(n.Prefix() + n.LocalName()) @@ -822,3 +911,13 @@ func getNodePosition(q query) int { } return 1 } + +func getNodeDepth(q query) int { + type Depth interface { + depth() int + } + if count, ok := q.(Depth); ok { + return count.depth() + } + return 0 +} diff --git a/vendor/github.com/antchfx/xpath/xpath.go b/vendor/github.com/antchfx/xpath/xpath.go index 7e3f52c..5f6aa89 100644 --- a/vendor/github.com/antchfx/xpath/xpath.go +++ b/vendor/github.com/antchfx/xpath/xpath.go @@ -2,6 +2,7 @@ package xpath import ( "errors" + "fmt" ) // NodeType represents a type of XPath node. 
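transformFunctionQuery above is the plumbing behind the reverse() case added to build.go earlier in this patch: it yields nodes rather than a scalar, so the function can appear as a selectable step. A brief sketch under the same illustrative assumptions as the previous example.

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(
		`<prices><fuel name="SP95">1.42</fuel><fuel name="Gazole">1.31</fuel></prices>`))
	if err != nil {
		panic(err)
	}

	// reverse() hands back the selected node-set in reverse document order.
	for _, n := range xmlquery.Find(doc, `reverse(//fuel)`) {
		fmt.Println(n.SelectAttr("name")) // expected: Gazole, then SP95
	}
}
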
@@ -144,6 +145,9 @@ func Compile(expr string) (*Expr, error) { if err != nil { return nil, err } + if qy == nil { + return nil, fmt.Errorf(fmt.Sprintf("undeclared variable in XPath expression: %s", expr)) + } return &Expr{s: expr, q: qy}, nil } @@ -151,7 +155,7 @@ func Compile(expr string) (*Expr, error) { func MustCompile(expr string) *Expr { exp, err := Compile(expr) if err != nil { - return nil + return &Expr{s: expr, q: nopQuery{}} } return exp } diff --git a/vendor/github.com/golang/groupcache/LICENSE b/vendor/github.com/golang/groupcache/LICENSE new file mode 100644 index 0000000..37ec93a --- /dev/null +++ b/vendor/github.com/golang/groupcache/LICENSE @@ -0,0 +1,191 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. 
For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. 
You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. + +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/golang/groupcache/lru/lru.go b/vendor/github.com/golang/groupcache/lru/lru.go new file mode 100644 index 0000000..eac1c76 --- /dev/null +++ b/vendor/github.com/golang/groupcache/lru/lru.go @@ -0,0 +1,133 @@ +/* +Copyright 2013 Google Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Package lru implements an LRU cache. +package lru + +import "container/list" + +// Cache is an LRU cache. It is not safe for concurrent access. +type Cache struct { + // MaxEntries is the maximum number of cache entries before + // an item is evicted. Zero means no limit. + MaxEntries int + + // OnEvicted optionally specifies a callback function to be + // executed when an entry is purged from the cache. + OnEvicted func(key Key, value interface{}) + + ll *list.List + cache map[interface{}]*list.Element +} + +// A Key may be any value that is comparable. See http://golang.org/ref/spec#Comparison_operators +type Key interface{} + +type entry struct { + key Key + value interface{} +} + +// New creates a new Cache. +// If maxEntries is zero, the cache has no limit and it's assumed +// that eviction is done by the caller. +func New(maxEntries int) *Cache { + return &Cache{ + MaxEntries: maxEntries, + ll: list.New(), + cache: make(map[interface{}]*list.Element), + } +} + +// Add adds a value to the cache. +func (c *Cache) Add(key Key, value interface{}) { + if c.cache == nil { + c.cache = make(map[interface{}]*list.Element) + c.ll = list.New() + } + if ee, ok := c.cache[key]; ok { + c.ll.MoveToFront(ee) + ee.Value.(*entry).value = value + return + } + ele := c.ll.PushFront(&entry{key, value}) + c.cache[key] = ele + if c.MaxEntries != 0 && c.ll.Len() > c.MaxEntries { + c.RemoveOldest() + } +} + +// Get looks up a key's value from the cache. 
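The newly vendored lru package above is a small, non-thread-safe LRU. A usage sketch; the keys, values and capacity are illustrative.

package main

import (
	"fmt"

	"github.com/golang/groupcache/lru"
)

func main() {
	c := lru.New(2) // keep at most two entries
	c.OnEvicted = func(key lru.Key, value interface{}) {
		fmt.Println("evicted:", key)
	}

	c.Add("sp95", 1.42)
	c.Add("gazole", 1.31)
	c.Add("e85", 0.69) // exceeds MaxEntries, evicts the least recently used ("sp95")

	if v, ok := c.Get("gazole"); ok {
		fmt.Println("gazole:", v)
	}
	fmt.Println("len:", c.Len())
}
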
+func (c *Cache) Get(key Key) (value interface{}, ok bool) { + if c.cache == nil { + return + } + if ele, hit := c.cache[key]; hit { + c.ll.MoveToFront(ele) + return ele.Value.(*entry).value, true + } + return +} + +// Remove removes the provided key from the cache. +func (c *Cache) Remove(key Key) { + if c.cache == nil { + return + } + if ele, hit := c.cache[key]; hit { + c.removeElement(ele) + } +} + +// RemoveOldest removes the oldest item from the cache. +func (c *Cache) RemoveOldest() { + if c.cache == nil { + return + } + ele := c.ll.Back() + if ele != nil { + c.removeElement(ele) + } +} + +func (c *Cache) removeElement(e *list.Element) { + c.ll.Remove(e) + kv := e.Value.(*entry) + delete(c.cache, kv.key) + if c.OnEvicted != nil { + c.OnEvicted(kv.key, kv.value) + } +} + +// Len returns the number of items in the cache. +func (c *Cache) Len() int { + if c.cache == nil { + return 0 + } + return c.ll.Len() +} + +// Clear purges all stored items from the cache. +func (c *Cache) Clear() { + if c.OnEvicted != nil { + for _, e := range c.cache { + kv := e.Value.(*entry) + c.OnEvicted(kv.key, kv.value) + } + } + c.ll = nil + c.cache = nil +} diff --git a/vendor/github.com/influxdata/influxdb1-client/v2/client.go b/vendor/github.com/influxdata/influxdb1-client/v2/client.go index 0cf7b5f..ddd0b48 100644 --- a/vendor/github.com/influxdata/influxdb1-client/v2/client.go +++ b/vendor/github.com/influxdata/influxdb1-client/v2/client.go @@ -3,6 +3,7 @@ package client // import "github.com/influxdata/influxdb1-client/v2" import ( "bytes" + "compress/gzip" "crypto/tls" "encoding/json" "errors" @@ -20,6 +21,13 @@ import ( "github.com/influxdata/influxdb1-client/models" ) +type ContentEncoding string + +const ( + DefaultEncoding ContentEncoding = "" + GzipEncoding ContentEncoding = "gzip" +) + // HTTPConfig is the config data needed to create an HTTP Client. type HTTPConfig struct { // Addr should be of the form "http://host:port" @@ -48,6 +56,9 @@ type HTTPConfig struct { // Proxy configures the Proxy function on the HTTP client. Proxy func(req *http.Request) (*url.URL, error) + + // WriteEncoding specifies the encoding of write request + WriteEncoding ContentEncoding } // BatchPointsConfig is the config data needed to create an instance of the BatchPoints struct. 
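The influxdb1-client changes above add an opt-in gzip content encoding for writes via the new WriteEncoding field and GzipEncoding constant; the hunks that follow wire it into NewHTTPClient and Write. A sketch of enabling it; the address, database name and point are placeholders, and the batch/point calls are the library's existing API.

package main

import (
	"log"
	"time"

	client "github.com/influxdata/influxdb1-client/v2"
)

func main() {
	c, err := client.NewHTTPClient(client.HTTPConfig{
		Addr:          "http://localhost:8086", // placeholder
		WriteEncoding: client.GzipEncoding,     // new in this update; default is uncompressed
	})
	if err != nil {
		log.Fatal(err)
	}
	defer c.Close()

	bp, err := client.NewBatchPoints(client.BatchPointsConfig{Database: "fuelprices", Precision: "s"})
	if err != nil {
		log.Fatal(err)
	}
	pt, err := client.NewPoint("price", map[string]string{"fuel": "SP95"},
		map[string]interface{}{"value": 1.42}, time.Now())
	if err != nil {
		log.Fatal(err)
	}
	bp.AddPoint(pt)

	// Write gzips the line-protocol body and sets Content-Encoding: gzip.
	if err := c.Write(bp); err != nil {
		log.Fatal(err)
	}
}
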
@@ -102,6 +113,12 @@ func NewHTTPClient(conf HTTPConfig) (Client, error) { return nil, errors.New(m) } + switch conf.WriteEncoding { + case DefaultEncoding, GzipEncoding: + default: + return nil, fmt.Errorf("unsupported encoding %s", conf.WriteEncoding) + } + tr := &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: conf.InsecureSkipVerify, @@ -121,6 +138,7 @@ func NewHTTPClient(conf HTTPConfig) (Client, error) { Transport: tr, }, transport: tr, + encoding: conf.WriteEncoding, }, nil } @@ -186,6 +204,7 @@ type client struct { useragent string httpClient *http.Client transport *http.Transport + encoding ContentEncoding } // BatchPoints is an interface into a batched grouping of points to write into @@ -366,15 +385,29 @@ func NewPointFrom(pt models.Point) *Point { func (c *client) Write(bp BatchPoints) error { var b bytes.Buffer + var w io.Writer + if c.encoding == GzipEncoding { + w = gzip.NewWriter(&b) + } else { + w = &b + } + for _, p := range bp.Points() { if p == nil { continue } - if _, err := b.WriteString(p.pt.PrecisionString(bp.Precision())); err != nil { + if _, err := io.WriteString(w, p.pt.PrecisionString(bp.Precision())); err != nil { return err } - if err := b.WriteByte('\n'); err != nil { + if _, err := w.Write([]byte{'\n'}); err != nil { + return err + } + } + + // gzip writer should be closed to flush data into underlying buffer + if c, ok := w.(io.Closer); ok { + if err := c.Close(); err != nil { return err } } @@ -386,6 +419,9 @@ func (c *client) Write(bp BatchPoints) error { if err != nil { return err } + if c.encoding != DefaultEncoding { + req.Header.Set("Content-Encoding", string(c.encoding)) + } req.Header.Set("Content-Type", "") req.Header.Set("User-Agent", c.useragent) if c.username != "" { @@ -429,6 +465,9 @@ type Query struct { Parameters map[string]interface{} } +// Params is a type alias to the query parameters. +type Params map[string]interface{} + // NewQuery returns a query object. // The database and precision arguments can be empty strings if they are not needed for the query. func NewQuery(command, database, precision string) Query { @@ -493,9 +532,10 @@ type Message struct { // Result represents a resultset returned from a single statement. type Result struct { - Series []models.Row - Messages []*Message - Err string `json:"error,omitempty"` + StatementId int `json:"statement_id"` + Series []models.Row + Messages []*Message + Err string `json:"error,omitempty"` } // Query sends a command to the server and returns the Response. diff --git a/vendor/github.com/influxdata/influxdb1-client/v2/params.go b/vendor/github.com/influxdata/influxdb1-client/v2/params.go new file mode 100644 index 0000000..5616bfb --- /dev/null +++ b/vendor/github.com/influxdata/influxdb1-client/v2/params.go @@ -0,0 +1,73 @@ +package client + +import ( + "encoding/json" + "time" +) + +type ( + // Identifier is an identifier value. + Identifier string + + // StringValue is a string literal. + StringValue string + + // RegexValue is a regexp literal. + RegexValue string + + // NumberValue is a number literal. + NumberValue float64 + + // IntegerValue is an integer literal. + IntegerValue int64 + + // BooleanValue is a boolean literal. + BooleanValue bool + + // TimeValue is a time literal. + TimeValue time.Time + + // DurationValue is a duration literal. 
+ DurationValue time.Duration +) + +func (v Identifier) MarshalJSON() ([]byte, error) { + m := map[string]string{"identifier": string(v)} + return json.Marshal(m) +} + +func (v StringValue) MarshalJSON() ([]byte, error) { + m := map[string]string{"string": string(v)} + return json.Marshal(m) +} + +func (v RegexValue) MarshalJSON() ([]byte, error) { + m := map[string]string{"regex": string(v)} + return json.Marshal(m) +} + +func (v NumberValue) MarshalJSON() ([]byte, error) { + m := map[string]float64{"number": float64(v)} + return json.Marshal(m) +} + +func (v IntegerValue) MarshalJSON() ([]byte, error) { + m := map[string]int64{"integer": int64(v)} + return json.Marshal(m) +} + +func (v BooleanValue) MarshalJSON() ([]byte, error) { + m := map[string]bool{"boolean": bool(v)} + return json.Marshal(m) +} + +func (v TimeValue) MarshalJSON() ([]byte, error) { + t := time.Time(v) + m := map[string]string{"string": t.Format(time.RFC3339Nano)} + return json.Marshal(m) +} + +func (v DurationValue) MarshalJSON() ([]byte, error) { + m := map[string]int64{"duration": int64(v)} + return json.Marshal(m) +} diff --git a/vendor/golang.org/x/net/html/const.go b/vendor/golang.org/x/net/html/const.go index a3a918f..ff7acf2 100644 --- a/vendor/golang.org/x/net/html/const.go +++ b/vendor/golang.org/x/net/html/const.go @@ -52,8 +52,7 @@ var isSpecialElementMap = map[string]bool{ "iframe": true, "img": true, "input": true, - "isindex": true, // The 'isindex' element has been removed, but keep it for backwards compatibility. - "keygen": true, + "keygen": true, // "keygen" has been removed from the spec, but are kept here for backwards compatibility. "li": true, "link": true, "listing": true, diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go index 01477a9..9da9e9d 100644 --- a/vendor/golang.org/x/net/html/foreign.go +++ b/vendor/golang.org/x/net/html/foreign.go @@ -161,66 +161,62 @@ var mathMLAttributeAdjustments = map[string]string{ } var svgAttributeAdjustments = map[string]string{ - "attributename": "attributeName", - "attributetype": "attributeType", - "basefrequency": "baseFrequency", - "baseprofile": "baseProfile", - "calcmode": "calcMode", - "clippathunits": "clipPathUnits", - "contentscripttype": "contentScriptType", - "contentstyletype": "contentStyleType", - "diffuseconstant": "diffuseConstant", - "edgemode": "edgeMode", - "externalresourcesrequired": "externalResourcesRequired", - "filterres": "filterRes", - "filterunits": "filterUnits", - "glyphref": "glyphRef", - "gradienttransform": "gradientTransform", - "gradientunits": "gradientUnits", - "kernelmatrix": "kernelMatrix", - "kernelunitlength": "kernelUnitLength", - "keypoints": "keyPoints", - "keysplines": "keySplines", - "keytimes": "keyTimes", - "lengthadjust": "lengthAdjust", - "limitingconeangle": "limitingConeAngle", - "markerheight": "markerHeight", - "markerunits": "markerUnits", - "markerwidth": "markerWidth", - "maskcontentunits": "maskContentUnits", - "maskunits": "maskUnits", - "numoctaves": "numOctaves", - "pathlength": "pathLength", - "patterncontentunits": "patternContentUnits", - "patterntransform": "patternTransform", - "patternunits": "patternUnits", - "pointsatx": "pointsAtX", - "pointsaty": "pointsAtY", - "pointsatz": "pointsAtZ", - "preservealpha": "preserveAlpha", - "preserveaspectratio": "preserveAspectRatio", - "primitiveunits": "primitiveUnits", - "refx": "refX", - "refy": "refY", - "repeatcount": "repeatCount", - "repeatdur": "repeatDur", - "requiredextensions": 
"requiredExtensions", - "requiredfeatures": "requiredFeatures", - "specularconstant": "specularConstant", - "specularexponent": "specularExponent", - "spreadmethod": "spreadMethod", - "startoffset": "startOffset", - "stddeviation": "stdDeviation", - "stitchtiles": "stitchTiles", - "surfacescale": "surfaceScale", - "systemlanguage": "systemLanguage", - "tablevalues": "tableValues", - "targetx": "targetX", - "targety": "targetY", - "textlength": "textLength", - "viewbox": "viewBox", - "viewtarget": "viewTarget", - "xchannelselector": "xChannelSelector", - "ychannelselector": "yChannelSelector", - "zoomandpan": "zoomAndPan", + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + "markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan", } diff --git a/vendor/golang.org/x/net/html/node.go b/vendor/golang.org/x/net/html/node.go index 633ee15..1350eef 100644 --- a/vendor/golang.org/x/net/html/node.go +++ b/vendor/golang.org/x/net/html/node.go @@ -18,6 +18,11 @@ const ( ElementNode CommentNode DoctypeNode + // RawNode nodes are not returned by the parser, but can be part of the + // Node tree passed to func Render to insert raw HTML (without escaping). + // If so, this package makes no guarantee that the rendered HTML is secure + // (from e.g. Cross Site Scripting attacks) or well-formed. 
+ RawNode scopeMarkerNode ) diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go index 992cff2..f91466f 100644 --- a/vendor/golang.org/x/net/html/parse.go +++ b/vendor/golang.org/x/net/html/parse.go @@ -184,6 +184,17 @@ func (p *parser) clearStackToContext(s scope) { } } +// parseGenericRawTextElements implements the generic raw text element parsing +// algorithm defined in 12.2.6.2. +// https://html.spec.whatwg.org/multipage/parsing.html#parsing-elements-that-contain-only-text +// TODO: Since both RAWTEXT and RCDATA states are treated as tokenizer's part +// officially, need to make tokenizer consider both states. +func (p *parser) parseGenericRawTextElement() { + p.addElement() + p.originalIM = p.im + p.im = textIM +} + // generateImpliedEndTags pops nodes off the stack of open elements as long as // the top node has a tag name of dd, dt, li, optgroup, option, p, rb, rp, rt or rtc. // If exceptions are specified, nodes with that name will not be popped off. @@ -192,16 +203,17 @@ func (p *parser) generateImpliedEndTags(exceptions ...string) { loop: for i = len(p.oe) - 1; i >= 0; i-- { n := p.oe[i] - if n.Type == ElementNode { - switch n.DataAtom { - case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc: - for _, except := range exceptions { - if n.Data == except { - break loop - } + if n.Type != ElementNode { + break + } + switch n.DataAtom { + case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc: + for _, except := range exceptions { + if n.Data == except { + break loop } - continue } + continue } break } @@ -369,8 +381,7 @@ findIdenticalElements: // Section 12.2.4.3. func (p *parser) clearActiveFormattingElements() { for { - n := p.afe.pop() - if len(p.afe) == 0 || n.Type == scopeMarkerNode { + if n := p.afe.pop(); len(p.afe) == 0 || n.Type == scopeMarkerNode { return } } @@ -625,25 +636,29 @@ func inHeadIM(p *parser) bool { switch p.tok.DataAtom { case a.Html: return inBodyIM(p) - case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta: + case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta: p.addElement() p.oe.pop() p.acknowledgeSelfClosingTag() return true case a.Noscript: - p.addElement() if p.scripting { - p.setOriginalIM() - p.im = textIM - } else { - p.im = inHeadNoscriptIM + p.parseGenericRawTextElement() + return true } + p.addElement() + p.im = inHeadNoscriptIM + // Don't let the tokenizer go into raw text mode when scripting is disabled. + p.tokenizer.NextIsNotRawText() return true - case a.Script, a.Title, a.Noframes, a.Style: + case a.Script, a.Title: p.addElement() p.setOriginalIM() p.im = textIM return true + case a.Noframes, a.Style: + p.parseGenericRawTextElement() + return true case a.Head: // Ignore the token. return true @@ -713,7 +728,13 @@ func inHeadNoscriptIM(p *parser) bool { return inBodyIM(p) case a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Style: return inHeadIM(p) - case a.Head, a.Noscript: + case a.Head: + // Ignore the token. + return true + case a.Noscript: + // Don't let the tokenizer go into raw text mode even when a