Compare commits

14 Commits

SHA1
6758df2b1d
810583964c
ed361c8e9e
d62d777f05
e1533ab274
d13318e927
7234075152
b8befed108
8b3e22ba50
7dd768a2db
4b32e35c0e
821557fb77
cd81a04a1e
eb12b7b205
.drone.yml (159 lines changed)

@@ -1,89 +1,74 @@
 ---
 kind: pipeline
 type: docker
-name: cleanup-before
+name: build-linux

 steps:
-- name: clean
-  image: alpine
-  commands:
-  - rm -rf /build/*
-  volumes:
-  - name: build
-    path: /build
-  when:
-    event: tag
-
-volumes:
-- name: build
-  host:
-    path: /tmp/fuelprices/build
-
----
-kind: pipeline
-type: docker
-name: default-linux-amd64
-
-steps:
-- name: build
+- name: build-linux-amd64
   image: golang
   commands:
-  - ./ci-build.sh build
+  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
   environment:
     GOOS: linux
     GOARCH: amd64
-  volumes:
-  - name: build
-    path: /build
-
-volumes:
-- name: build
-  host:
-    path: /tmp/fuelprices/build
-
-depends_on:
-- cleanup-before
-
----
-kind: pipeline
-type: docker
-name: default-linux-arm64
-
-steps:
-- name: build
+    GOOPTIONS: -mod=vendor
+    SRCFILES: cmd/fuelprices/*.go
+    PROJECTNAME: fuelprices
+  when:
+    event:
+      exclude:
+      - tag
+- name: build-linux-arm64
   image: golang
   commands:
-  - ./ci-build.sh build
+  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
   environment:
     GOOS: linux
     GOARCH: arm64
-  volumes:
-  - name: build
-    path: /build
-
-volumes:
-- name: build
-  host:
-    path: /tmp/fuelprices/build
-
-depends_on:
-- cleanup-before
+    GOOPTIONS: -mod=vendor
+    SRCFILES: cmd/fuelprices/*.go
+    PROJECTNAME: fuelprices
+  when:
+    event:
+      exclude:
+      - tag

 ---
 kind: pipeline
 type: docker
-name: gitea-release
+name: gitea-release-linux

 steps:
-- name: move
-  image: alpine
+- name: build-linux-amd64
+  image: golang
   commands:
-  - mv build/* ./
-  volumes:
-  - name: build
-    path: /drone/src/build
+  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
+  - tar -czvf $PROJECTNAME-$DRONE_TAG-$GOOS-$GOARCH.tar.gz $PROJECTNAME
+  - echo $PROJECTNAME $DRONE_TAG > VERSION
+  environment:
+    GOOS: linux
+    GOARCH: amd64
+    GOOPTIONS: -mod=vendor
+    SRCFILES: cmd/fuelprices/*.go
+    PROJECTNAME: fuelprices
   when:
-    event: tag
+    event:
+    - tag
+- name: build-linux-arm64
+  image: golang
+  commands:
+  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
+  - tar -czvf $PROJECTNAME-$DRONE_TAG-$GOOS-$GOARCH.tar.gz $PROJECTNAME
+  - echo $PROJECTNAME $DRONE_TAG > VERSION
+  environment:
+    GOOS: linux
+    GOARCH: arm64
+    GOOPTIONS: -mod=vendor
+    SRCFILES: cmd/fuelprices/*.go
+    PROJECTNAME: fuelprices
+  when:
+    event:
+    - tag
 - name: release
   image: plugins/gitea-release
   settings:
@@ -95,50 +80,6 @@ steps:
     - sha256
     - sha512
     title: VERSION
-  volumes:
-  - name: build
-    path: /drone/src/build
   when:
-    event: tag
-- name: ls
-  image: alpine
-  commands:
-  - find .
-  volumes:
-  - name: build
-    path: /drone/src/build
-  when:
-    event: tag
-
-volumes:
-- name: build
-  host:
-    path: /tmp/fuelprices/build
-
-depends_on:
-- default-linux-amd64
-- default-linux-arm64
-
----
-kind: pipeline
-type: docker
-name: cleanup-after
-
-steps:
-- name: clean
-  image: alpine
-  commands:
-  - rm -rf /build/*
-  volumes:
-  - name: build
-    path: /build
-  when:
-    event: tag
-
-volumes:
-- name: build
-  host:
-    path: /tmp/fuelprices/build
-
-depends_on:
-- gitea-release
+    event:
+    - tag
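For readability, the new build-linux pipeline reassembled from the added lines above. This is a sketch only: the compare view strips leading whitespace, so the indentation shown here is assumed standard Drone YAML rather than copied from the file; the gitea-release-linux pipeline follows the same pattern with the extra tar and VERSION commands shown in the diff.

---
kind: pipeline
type: docker
name: build-linux

steps:
- name: build-linux-amd64
  image: golang
  commands:
  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
  environment:
    GOOS: linux
    GOARCH: amd64
    GOOPTIONS: -mod=vendor
    SRCFILES: cmd/fuelprices/*.go
    PROJECTNAME: fuelprices
  when:
    event:
      exclude:
      - tag
- name: build-linux-arm64
  image: golang
  commands:
  - go build -o $PROJECTNAME $GOOPTIONS $SRCFILES
  environment:
    GOOS: linux
    GOARCH: arm64
    GOOPTIONS: -mod=vendor
    SRCFILES: cmd/fuelprices/*.go
    PROJECTNAME: fuelprices
  when:
    event:
      exclude:
      - tag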
.vscode/settings.json (4 lines changed; vendored)

@@ -1,4 +0,0 @@
-{
-    "go.formatTool": "goimports",
-    "editor.tabSize": 2
-}
Makefile (18 lines changed)

@@ -1,18 +0,0 @@
-# fuelprices Makefile
-
-GOCMD=go
-GOBUILDCMD=${GOCMD} build
-GOOPTIONS=-mod=vendor -ldflags="-s -w"
-
-RMCMD=rm
-BINNAME=fuelprices
-
-SRCFILES=cmd/fuelprices/*.go
-
-all: build
-
-build:
-	${GOBUILDCMD} ${GOOPTIONS} ${SRCFILES}
-
-clean:
-	${RMCMD} -f ${BINNAME}
ci-build.sh (62 lines changed)

@@ -1,62 +0,0 @@
-#!/bin/bash
-
-set -e
-
-PROJECTNAME=fuelprices
-RELEASENAME=${PROJECTNAME}
-VERSION="0"
-
-GOOPTIONS="-mod=vendor"
-SRCFILES=cmd/fuelprices/*.go
-
-build() {
-  echo "Begin of build"
-  if [[ ! -z $DRONE_TAG ]]
-  then
-    echo "Drone tag set, let's do a release"
-    VERSION=$DRONE_TAG
-    echo "${PROJECTNAME} ${VERSION}" > /build/VERSION
-  elif [[ ! -z $DRONE_TAG ]]
-  then
-    echo "Drone not set, let's only do a build"
-    VERSION=$DRONE_COMMIT
-  fi
-
-  if [[ ! -z $VERSION && ! -z $GOOS && ! -z $GOARCH ]]
-  then
-    echo "Let's set a release name"
-    RELEASENAME=${PROJECTNAME}-${VERSION}-${GOOS}-${GOARCH}
-  fi
-
-  echo "Building project"
-  go build -o ${PROJECTNAME} ${GOOPTIONS} ${SRCFILES}
-
-  if [[ ! -z $DRONE_TAG ]]
-  then
-    echo "Let's make archives"
-    mkdir -p /build
-    tar -czvf /build/${RELEASENAME}.tar.gz ${PROJECTNAME}
-  fi
-
-  echo "Removing binary file"
-  rm ${PROJECTNAME}
-
-  echo "End of build"
-}
-
-clean() {
-  rm -rf $RELEASEDIR
-}
-
-case $1 in
-  "build")
-    build
-    ;;
-  "clean")
-    clean
-    ;;
-  *)
-    echo "No options choosen"
-    exit 1
-    ;;
-esac
go.mod (24 lines changed)

@@ -1,15 +1,19 @@
 module git.paulbsd.com/paulbsd/fuelprices

-go 1.13
+go 1.23

 require (
-	github.com/antchfx/xmlquery v1.3.3
-	github.com/antchfx/xpath v1.1.11 // indirect
-	github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect
-	github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab
-	github.com/smartystreets/assertions v1.2.0 // indirect
-	github.com/smartystreets/goconvey v1.6.4 // indirect
-	golang.org/x/net v0.0.0-20210119194325-5f4716e94777 // indirect
-	golang.org/x/text v0.3.5 // indirect
-	gopkg.in/ini.v1 v1.62.0
+	github.com/antchfx/xmlquery v1.4.1
+	github.com/antchfx/xpath v1.3.1 // indirect
+	github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c
+	golang.org/x/net v0.28.0 // indirect
+	golang.org/x/text v0.17.0 // indirect
+	gopkg.in/ini.v1 v1.67.0
+)
+
+require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
+	github.com/stretchr/testify v1.7.0 // indirect
+	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
 )
go.sum (136 lines changed)

@@ -1,59 +1,93 @@
-github.com/antchfx/xmlquery v1.0.0 h1:YuEPqexGG2opZKNc9JU3Zw6zFXwC47wNcy6/F8oKsrM=
-github.com/antchfx/xmlquery v1.0.0/go.mod h1:/+CnyD/DzHRnv2eRxrVbieRU/FIF6N0C+7oTtyUtCKk=
-github.com/antchfx/xmlquery v1.3.3 h1:HYmadPG0uz8CySdL68rB4DCLKXz2PurCjS3mnkVF4CQ=
-github.com/antchfx/xmlquery v1.3.3/go.mod h1:64w0Xesg2sTaawIdNqMB+7qaW/bSqkQm+ssPaCMWNnc=
-github.com/antchfx/xpath v1.0.0 h1:Q5gFgh2O40VTSwMOVbFE7nFNRBu3tS21Tn0KAWeEjtk=
-github.com/antchfx/xpath v1.0.0/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
-github.com/antchfx/xpath v1.1.10/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
-github.com/antchfx/xpath v1.1.11 h1:WOFtK8TVAjLm3lbgqeP0arlHpvCEeTANeWZ/csPpJkQ=
-github.com/antchfx/xpath v1.1.11/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
-github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
+github.com/antchfx/xmlquery v1.3.12 h1:6TMGpdjpO/P8VhjnaYPXuqT3qyJ/VsqoyNTmJzNBTQ4=
+github.com/antchfx/xmlquery v1.3.12/go.mod h1:3w2RvQvTz+DaT5fSgsELkSJcdNgkmg6vuXDEuhdwsPQ=
+github.com/antchfx/xmlquery v1.3.15 h1:aJConNMi1sMha5G8YJoAIF5P+H+qG1L73bSItWHo8Tw=
+github.com/antchfx/xmlquery v1.3.15/go.mod h1:zMDv5tIGjOxY/JCNNinnle7V/EwthZ5IT8eeCGJKRWA=
+github.com/antchfx/xmlquery v1.3.18 h1:FSQ3wMuphnPPGJOFhvc+cRQ2CT/rUj4cyQXkJcjOwz0=
+github.com/antchfx/xmlquery v1.3.18/go.mod h1:Afkq4JIeXut75taLSuI31ISJ/zeq+3jG7TunF7noreA=
+github.com/antchfx/xmlquery v1.4.1 h1:YgpSwbeWvLp557YFTi8E3z6t6/hYjmFEtiEKbDfEbl0=
+github.com/antchfx/xmlquery v1.4.1/go.mod h1:lKezcT8ELGt8kW5L+ckFMTbgdR61/odpPgDv8Gvi1fI=
+github.com/antchfx/xpath v1.2.1 h1:qhp4EW6aCOVr5XIkT+l6LJ9ck/JsUH/yyauNgTQkBF8=
+github.com/antchfx/xpath v1.2.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/antchfx/xpath v1.2.4 h1:dW1HB/JxKvGtJ9WyVGJ0sIoEcqftV3SqIstujI+B9XY=
+github.com/antchfx/xpath v1.2.4/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/antchfx/xpath v1.2.5 h1:hqZ+wtQ+KIOV/S3bGZcIhpgYC26um2bZYP2KVGcR7VY=
+github.com/antchfx/xpath v1.2.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/antchfx/xpath v1.3.1 h1:PNbFuUqHwWl0xRjvUPjJ95Agbmdj2uzzIwmQKgu4oCk=
+github.com/antchfx/xpath v1.3.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
-github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0=
-github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc h1:KpMgaYJRieDkHZJWY3LMafvtqS/U8xX6+lUN+OKpl/Y=
-github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
-github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab h1:HqW4xhhynfjrtEiiSGcQUd6vrK23iMam1FO8rI7mwig=
-github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
-github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
-github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
-github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
-github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
-github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
-github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
-github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a h1:pa8hGb/2YqsZKovtsgrwcDH1RZhVbTKCjLp47XpqCDs=
-github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
-github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
-github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c h1:qSHzRbhzK8RdXOsAdfDgO49TtqC1oZ+acxPrkfTxcCs=
+github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190607181551-461777fb6f67 h1:rJJxsykSlULwd2P2+pg/rtnwN2FrWp4IuCxOSyS0V00=
-golang.org/x/net v0.0.0-20190607181551-461777fb6f67/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U=
-golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
-golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew=
-golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.1.0 h1:hZ/3BUoy5aId7sCpA/Tc5lt8DkFgdVS2onTpJsZ/fl0=
+golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
+golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
+golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
+golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
+golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
+golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
+golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc=
-golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
-golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
+golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-gopkg.in/ini.v1 v1.42.0 h1:7N3gPTt50s8GuLortA00n8AqRTk75qOP98+mTPpgzRk=
-gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
-gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU=
-gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
+gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
+gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
(file name not captured in the export; hunks are from the zipfile package)

@@ -3,7 +3,7 @@ package zipfile
 import (
 	"archive/zip"
 	"bytes"
-	"io/ioutil"
+	"io"
 	"log"
 	"net/http"
 	"time"
@@ -27,7 +27,7 @@ func (zipfile *ZipFile) DownloadFile(c *config.Config) (err error) {
 	}
 	defer resp.Body.Close()

-	zipfile.Content, err = ioutil.ReadAll(resp.Body)
+	zipfile.Content, err = io.ReadAll(resp.Body)
 	if err != nil {
 		return
 	}
@@ -50,7 +50,7 @@ func (zipfile *ZipFile) ExtractZip(c *config.Config, xmlfile *xmlfile.XMLFile) (
 		if err != nil {
 			return err
 		}
-		xmlfile.Content, err = ioutil.ReadAll(rc)
+		xmlfile.Content, err = io.ReadAll(rc)
 		rc.Close()
 	} else {
 		log.Fatal("File not found")
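These hunks swap the deprecated io/ioutil helpers for their io equivalents (io.ReadAll replaces ioutil.ReadAll since Go 1.16). Below is a minimal, self-contained Go sketch of the same read pattern; the URL is a placeholder, not the project's real data source.

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Download a resource and read the whole body into memory.
	resp, err := http.Get("https://example.com/data.zip") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// io.ReadAll is the drop-in replacement for the deprecated ioutil.ReadAll.
	content, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("read %d bytes\n", len(content))
}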
vendor/github.com/antchfx/xmlquery/.travis.yml (17 lines changed; generated, vendored)
Vendored copy of the upstream file removed; diff collapsed.
vendor/github.com/antchfx/xmlquery/README.md (434 lines changed; generated, vendored)
Vendored copy of the upstream file updated with the xmlquery upgrade; diff collapsed.
vendor/github.com/antchfx/xmlquery/books.xml (121 lines changed; generated, vendored)
Vendored copy of the upstream sample file removed; diff collapsed.
vendor/github.com/antchfx/xmlquery/go.mod (9 lines changed; generated, vendored)
Vendored copy of the upstream file removed; diff collapsed.
vendor/github.com/antchfx/xmlquery/go.sum (14 lines changed; generated, vendored)
Vendored copy of the upstream file removed; diff collapsed.
209
vendor/github.com/antchfx/xmlquery/node.go
generated
vendored
209
vendor/github.com/antchfx/xmlquery/node.go
generated
vendored
@ -1,9 +1,9 @@
|
|||||||
package xmlquery
|
package xmlquery
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"encoding/xml"
|
"encoding/xml"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"html"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -27,8 +27,16 @@ const (
|
|||||||
CommentNode
|
CommentNode
|
||||||
// AttributeNode is an attribute of element.
|
// AttributeNode is an attribute of element.
|
||||||
AttributeNode
|
AttributeNode
|
||||||
|
// NotationNode is a directive represents in document (for example, <!text...>).
|
||||||
|
NotationNode
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type Attr struct {
|
||||||
|
Name xml.Name
|
||||||
|
Value string
|
||||||
|
NamespaceURI string
|
||||||
|
}
|
||||||
|
|
||||||
// A Node consists of a NodeType and some Data (tag name for
|
// A Node consists of a NodeType and some Data (tag name for
|
||||||
// element nodes, content for text) and are part of a tree of Nodes.
|
// element nodes, content for text) and are part of a tree of Nodes.
|
||||||
type Node struct {
|
type Node struct {
|
||||||
@ -38,34 +46,88 @@ type Node struct {
|
|||||||
Data string
|
Data string
|
||||||
Prefix string
|
Prefix string
|
||||||
NamespaceURI string
|
NamespaceURI string
|
||||||
Attr []xml.Attr
|
Attr []Attr
|
||||||
|
|
||||||
level int // node level in the tree
|
level int // node level in the tree
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type outputConfiguration struct {
|
||||||
|
printSelf bool
|
||||||
|
preserveSpaces bool
|
||||||
|
emptyElementTagSupport bool
|
||||||
|
skipComments bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type OutputOption func(*outputConfiguration)
|
||||||
|
|
||||||
|
// WithOutputSelf configures the Node to print the root node itself
|
||||||
|
func WithOutputSelf() OutputOption {
|
||||||
|
return func(oc *outputConfiguration) {
|
||||||
|
oc.printSelf = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithEmptyTagSupport empty tags should be written as <empty/> and
|
||||||
|
// not as <empty></empty>
|
||||||
|
func WithEmptyTagSupport() OutputOption {
|
||||||
|
return func(oc *outputConfiguration) {
|
||||||
|
oc.emptyElementTagSupport = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithoutComments will skip comments in output
|
||||||
|
func WithoutComments() OutputOption {
|
||||||
|
return func(oc *outputConfiguration) {
|
||||||
|
oc.skipComments = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithPreserveSpace will preserve spaces in output
|
||||||
|
func WithPreserveSpace() OutputOption {
|
||||||
|
return func(oc *outputConfiguration) {
|
||||||
|
oc.preserveSpaces = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newXMLName(name string) xml.Name {
|
||||||
|
if i := strings.IndexByte(name, ':'); i > 0 {
|
||||||
|
return xml.Name{
|
||||||
|
Space: name[:i],
|
||||||
|
Local: name[i+1:],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return xml.Name{
|
||||||
|
Local: name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n *Node) Level() int {
|
||||||
|
return n.level
|
||||||
|
}
|
||||||
|
|
||||||
// InnerText returns the text between the start and end tags of the object.
|
// InnerText returns the text between the start and end tags of the object.
|
||||||
func (n *Node) InnerText() string {
|
func (n *Node) InnerText() string {
|
||||||
var output func(*bytes.Buffer, *Node)
|
var output func(*strings.Builder, *Node)
|
||||||
output = func(buf *bytes.Buffer, n *Node) {
|
output = func(b *strings.Builder, n *Node) {
|
||||||
switch n.Type {
|
switch n.Type {
|
||||||
case TextNode, CharDataNode:
|
case TextNode, CharDataNode:
|
||||||
buf.WriteString(n.Data)
|
b.WriteString(n.Data)
|
  		case CommentNode:
  		default:
  			for child := n.FirstChild; child != nil; child = child.NextSibling {
- 				output(buf, child)
+ 				output(b, child)
  			}
  		}
  	}

- 	var buf bytes.Buffer
- 	output(&buf, n)
- 	return buf.String()
+ 	var b strings.Builder
+ 	output(&b, n)
+ 	return b.String()
  }

  func (n *Node) sanitizedData(preserveSpaces bool) string {
  	if preserveSpaces {
- 		return strings.Trim(n.Data, "\n\t")
+ 		return n.Data
  	}
  	return strings.TrimSpace(n.Data)
  }

@@ -79,89 +141,142 @@ func calculatePreserveSpaces(n *Node, pastValue bool) bool {
  	return pastValue
  }

- func outputXML(buf *bytes.Buffer, n *Node, preserveSpaces bool) {
+ func outputXML(b *strings.Builder, n *Node, preserveSpaces bool, config *outputConfiguration) {
  	preserveSpaces = calculatePreserveSpaces(n, preserveSpaces)
  	switch n.Type {
  	case TextNode:
- 		xml.EscapeText(buf, []byte(n.sanitizedData(preserveSpaces)))
+ 		b.WriteString(html.EscapeString(n.sanitizedData(preserveSpaces)))
  		return
  	case CharDataNode:
- 		buf.WriteString("<![CDATA[")
- 		xml.EscapeText(buf, []byte(n.sanitizedData(preserveSpaces)))
- 		buf.WriteString("]]>")
+ 		b.WriteString("<![CDATA[")
+ 		b.WriteString(n.Data)
+ 		b.WriteString("]]>")
  		return
  	case CommentNode:
- 		buf.WriteString("<!--")
- 		buf.WriteString(n.Data)
- 		buf.WriteString("-->")
+ 		if !config.skipComments {
+ 			b.WriteString("<!--")
+ 			b.WriteString(n.Data)
+ 			b.WriteString("-->")
+ 		}
  		return
+ 	case NotationNode:
+ 		fmt.Fprintf(b, "<!%s>", n.Data)
+ 		return
  	case DeclarationNode:
- 		buf.WriteString("<?" + n.Data)
+ 		b.WriteString("<?" + n.Data)
  	default:
  		if n.Prefix == "" {
- 			buf.WriteString("<" + n.Data)
+ 			b.WriteString("<" + n.Data)
  		} else {
- 			buf.WriteString("<" + n.Prefix + ":" + n.Data)
+ 			fmt.Fprintf(b, "<%s:%s", n.Prefix, n.Data)
  		}
  	}

  	for _, attr := range n.Attr {
  		if attr.Name.Space != "" {
- 			buf.WriteString(fmt.Sprintf(` %s:%s=`, attr.Name.Space, attr.Name.Local))
+ 			fmt.Fprintf(b, ` %s:%s=`, attr.Name.Space, attr.Name.Local)
  		} else {
- 			buf.WriteString(fmt.Sprintf(` %s=`, attr.Name.Local))
+ 			fmt.Fprintf(b, ` %s=`, attr.Name.Local)
  		}
- 		buf.WriteByte('"')
- 		xml.EscapeText(buf, []byte(attr.Value))
- 		buf.WriteByte('"')
+ 		b.WriteByte('"')
+ 		b.WriteString(html.EscapeString(attr.Value))
+ 		b.WriteByte('"')
  	}
  	if n.Type == DeclarationNode {
- 		buf.WriteString("?>")
+ 		b.WriteString("?>")
  	} else {
- 		buf.WriteString(">")
+ 		if n.FirstChild != nil || !config.emptyElementTagSupport {
+ 			b.WriteString(">")
+ 		} else {
+ 			b.WriteString("/>")
+ 			return
+ 		}
  	}
  	for child := n.FirstChild; child != nil; child = child.NextSibling {
- 		outputXML(buf, child, preserveSpaces)
+ 		outputXML(b, child, preserveSpaces, config)
  	}
  	if n.Type != DeclarationNode {
  		if n.Prefix == "" {
- 			buf.WriteString(fmt.Sprintf("</%s>", n.Data))
+ 			fmt.Fprintf(b, "</%s>", n.Data)
  		} else {
- 			buf.WriteString(fmt.Sprintf("</%s:%s>", n.Prefix, n.Data))
+ 			fmt.Fprintf(b, "</%s:%s>", n.Prefix, n.Data)
  		}
  	}
  }

  // OutputXML returns the text that including tags name.
  func (n *Node) OutputXML(self bool) string {
- 	var buf bytes.Buffer
- 	if self {
- 		outputXML(&buf, n, false)
+ 	config := &outputConfiguration{
+ 		printSelf:              true,
+ 		emptyElementTagSupport: false,
+ 	}
+ 	preserveSpaces := calculatePreserveSpaces(n, false)
+ 	var b strings.Builder
+ 	if self && n.Type != DocumentNode {
+ 		outputXML(&b, n, preserveSpaces, config)
  	} else {
  		for n := n.FirstChild; n != nil; n = n.NextSibling {
- 			outputXML(&buf, n, false)
+ 			outputXML(&b, n, preserveSpaces, config)
  		}
  	}

- 	return buf.String()
+ 	return b.String()
  }

+ // OutputXMLWithOptions returns the text that including tags name.
+ func (n *Node) OutputXMLWithOptions(opts ...OutputOption) string {
+ 	config := &outputConfiguration{}
+ 	// Set the options
+ 	for _, opt := range opts {
+ 		opt(config)
+ 	}
+ 	pastPreserveSpaces := config.preserveSpaces
+ 	preserveSpaces := calculatePreserveSpaces(n, pastPreserveSpaces)
+ 	var b strings.Builder
+ 	if config.printSelf && n.Type != DocumentNode {
+ 		outputXML(&b, n, preserveSpaces, config)
+ 	} else {
+ 		for n := n.FirstChild; n != nil; n = n.NextSibling {
+ 			outputXML(&b, n, preserveSpaces, config)
+ 		}
+ 	}
+ 	return b.String()
+ }

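OutputXMLWithOptions is built on Go's functional-options idiom: every OutputOption is a function that mutates the shared outputConfiguration before serialization begins. A self-contained sketch of that pattern with stand-in names (the library's exported option constructors are not part of this diff, so withSelf and withoutComments below are hypothetical):

package main

import "fmt"

// config mirrors the role of xmlquery's outputConfiguration (stand-in type).
type config struct {
	printSelf    bool
	skipComments bool
}

// option mirrors the role of xmlquery's OutputOption.
type option func(*config)

// Hypothetical constructors, for illustration only.
func withSelf() option        { return func(c *config) { c.printSelf = true } }
func withoutComments() option { return func(c *config) { c.skipComments = true } }

func render(opts ...option) string {
	c := &config{}
	for _, opt := range opts {
		opt(c) // each option mutates the shared configuration
	}
	return fmt.Sprintf("printSelf=%v skipComments=%v", c.printSelf, c.skipComments)
}

func main() {
	fmt.Println(render(withSelf(), withoutComments()))
}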
  // AddAttr adds a new attribute specified by 'key' and 'val' to a node 'n'.
  func AddAttr(n *Node, key, val string) {
- 	var attr xml.Attr
- 	if i := strings.Index(key, ":"); i > 0 {
- 		attr = xml.Attr{
- 			Name:  xml.Name{Space: key[:i], Local: key[i+1:]},
- 			Value: val,
- 		}
- 	} else {
- 		attr = xml.Attr{
- 			Name:  xml.Name{Local: key},
- 			Value: val,
- 		}
- 	}
- 	n.Attr = append(n.Attr, attr)
+ 	attr := Attr{
+ 		Name:  newXMLName(key),
+ 		Value: val,
+ 	}
+ 	n.Attr = append(n.Attr, attr)
  }

+ // SetAttr allows an attribute value with the specified name to be changed.
+ // If the attribute did not previously exist, it will be created.
+ func (n *Node) SetAttr(key, value string) {
+ 	name := newXMLName(key)
+ 	for i, attr := range n.Attr {
+ 		if attr.Name == name {
+ 			n.Attr[i].Value = value
+ 			return
+ 		}
+ 	}
+ 	AddAttr(n, key, value)
+ }
+
+ // RemoveAttr removes the attribute with the specified name.
+ func (n *Node) RemoveAttr(key string) {
+ 	name := newXMLName(key)
+ 	for i, attr := range n.Attr {
+ 		if attr.Name == name {
+ 			n.Attr = append(n.Attr[:i], n.Attr[i+1:]...)
+ 			return
+ 		}
+ 	}
+ }

  // AddChild adds a new node 'n' to a node 'parent' as its last child.
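For reference, a short illustrative sketch of the new attribute helpers in use; FindOne is an existing xmlquery helper that is not part of this diff, and the sample document is made up:

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(`<book id="b1" lang="en"/>`))
	if err != nil {
		panic(err)
	}
	// FindOne is part of the package's existing public API (not in this diff).
	book := xmlquery.FindOne(doc, "//book")
	book.SetAttr("id", "b2")     // existing attribute: value replaced in place
	book.SetAttr("edition", "3") // missing attribute: falls back to AddAttr
	book.RemoveAttr("lang")      // removes the attribute if present
	fmt.Println(doc.OutputXML(false))
}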
33	vendor/github.com/antchfx/xmlquery/options.go (generated, vendored, new file)

@@ -0,0 +1,33 @@
+ package xmlquery
+
+ import (
+ 	"encoding/xml"
+ 	"io"
+ )
+
+ type ParserOptions struct {
+ 	Decoder *DecoderOptions
+ }
+
+ func (options ParserOptions) apply(parser *parser) {
+ 	if options.Decoder != nil {
+ 		(*options.Decoder).apply(parser.decoder)
+ 	}
+ }
+
+ // DecoderOptions implement the very same options than the standard
+ // encoding/xml package. Please refer to this documentation:
+ // https://golang.org/pkg/encoding/xml/#Decoder
+ type DecoderOptions struct {
+ 	Strict        bool
+ 	AutoClose     []string
+ 	Entity        map[string]string
+ 	CharsetReader func(charset string, input io.Reader) (io.Reader, error)
+ }
+
+ func (options DecoderOptions) apply(decoder *xml.Decoder) {
+ 	decoder.Strict = options.Strict
+ 	decoder.AutoClose = options.AutoClose
+ 	decoder.Entity = options.Entity
+ 	decoder.CharsetReader = options.CharsetReader
+ }
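DecoderOptions is a plain pass-through: apply copies each field onto the underlying encoding/xml Decoder. For comparison, the equivalent direct standard-library configuration (stdlib only, illustrative input):

package main

import (
	"encoding/xml"
	"fmt"
	"strings"
)

func main() {
	d := xml.NewDecoder(strings.NewReader(`<p>one<br>two</p>`))
	d.Strict = false                // tolerate not-quite-well-formed input
	d.AutoClose = xml.HTMLAutoClose // auto-close void elements such as <br>
	d.Entity = xml.HTMLEntity       // resolve HTML entities like &nbsp;
	for {
		tok, err := d.Token()
		if err != nil {
			break
		}
		fmt.Printf("%T\n", tok) // print the concrete token type
	}
}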
194	vendor/github.com/antchfx/xmlquery/parse.go (generated, vendored)

@@ -3,12 +3,12 @@ package xmlquery
  import (
  	"bufio"
  	"encoding/xml"
- 	"errors"
  	"fmt"
  	"io"
  	"net/http"
  	"regexp"
  	"strings"
+ 	"sync"

  	"github.com/antchfx/xpath"
  	"golang.org/x/net/html/charset"

@@ -32,7 +32,13 @@ func LoadURL(url string) (*Node, error) {

  // Parse returns the parse tree for the XML from the given Reader.
  func Parse(r io.Reader) (*Node, error) {
+ 	return ParseWithOptions(r, ParserOptions{})
+ }
+
+ // ParseWithOptions is like parse, but with custom options
+ func ParseWithOptions(r io.Reader, options ParserOptions) (*Node, error) {
  	p := createParser(r)
+ 	options.apply(p)
  	for {
  		_, err := p.parse()
  		if err == io.EOF {
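A minimal usage sketch of the new ParseWithOptions entry point, wiring in the DecoderOptions type from options.go above (the option values are illustrative, not taken from this commit):

package main

import (
	"encoding/xml"
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	opts := xmlquery.ParserOptions{
		Decoder: &xmlquery.DecoderOptions{
			Strict:    false,             // accept slightly malformed markup
			AutoClose: xml.HTMLAutoClose, // auto-close <br>, <img>, ...
			Entity:    xml.HTMLEntity,    // resolve &nbsp; and friends
		},
	}
	doc, err := xmlquery.ParseWithOptions(strings.NewReader(`<p>a<br>b</p>`), opts)
	if err != nil {
		panic(err)
	}
	fmt.Println(doc.OutputXML(false))
}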
@@ -47,7 +53,6 @@ func Parse(r io.Reader) (*Node, error) {
  type parser struct {
  	decoder            *xml.Decoder
  	doc                *Node
- 	space2prefix       map[string]string
  	level              int
  	prev               *Node
  	streamElementXPath *xpath.Expr // Under streaming mode, this specifies the xpath to the target element node(s).

@@ -55,29 +60,40 @@ type parser struct {
  	streamNode     *Node         // Need to remember the last target node So we can clean it up upon next Read() call.
  	streamNodePrev *Node         // Need to remember target node's prev so upon target node removal, we can restore correct prev.
  	reader         *cachedReader // Need to maintain a reference to the reader, so we can determine whether a node contains CDATA.
+ 	once           sync.Once
+ 	space2prefix   map[string]*xmlnsPrefix
+ }
+
+ type xmlnsPrefix struct {
+ 	name  string
+ 	level int
  }

  func createParser(r io.Reader) *parser {
  	reader := newCachedReader(bufio.NewReader(r))
  	p := &parser{
  		decoder: xml.NewDecoder(reader),
  		doc:     &Node{Type: DocumentNode},
- 		space2prefix: make(map[string]string),
  		level:   0,
  		reader:  reader,
  	}
+ 	if p.decoder.CharsetReader == nil {
+ 		p.decoder.CharsetReader = charset.NewReaderLabel
+ 	}
- 	// http://www.w3.org/XML/1998/namespace is bound by definition to the prefix xml.
- 	p.space2prefix["http://www.w3.org/XML/1998/namespace"] = "xml"
- 	p.decoder.CharsetReader = charset.NewReaderLabel
  	p.prev = p.doc
  	return p
  }

  func (p *parser) parse() (*Node, error) {
+ 	p.once.Do(func() {
+ 		p.space2prefix = map[string]*xmlnsPrefix{"http://www.w3.org/XML/1998/namespace": {name: "xml", level: 0}}
+ 	})
+
  	var streamElementNodeCounter int
  	for {
+ 		p.reader.StartCaching()
  		tok, err := p.decoder.Token()
+ 		p.reader.StopCaching()
  		if err != nil {
  			return nil, err
  		}
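The namespace-prefix table is now initialized lazily inside parse() through sync.Once rather than in createParser. A stand-alone sketch of that lazy-initialization pattern with stand-in types:

package main

import (
	"fmt"
	"sync"
)

type table struct {
	once     sync.Once
	prefixes map[string]string
}

// lookup initializes the map exactly once, on first use, mirroring the
// p.once.Do(...) call added to (*parser).parse above.
func (t *table) lookup(uri string) string {
	t.once.Do(func() {
		t.prefixes = map[string]string{
			// bound by definition to the prefix xml
			"http://www.w3.org/XML/1998/namespace": "xml",
		}
	})
	return t.prefixes[uri]
}

func main() {
	var t table
	fmt.Println(t.lookup("http://www.w3.org/XML/1998/namespace")) // "xml"
}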
@@ -86,39 +102,56 @@ func (p *parser) parse() (*Node, error) {
  		case xml.StartElement:
  			if p.level == 0 {
  				// mising XML declaration
- 				node := &Node{Type: DeclarationNode, Data: "xml", level: 1}
+ 				attributes := make([]Attr, 1)
+ 				attributes[0].Name = xml.Name{Local: "version"}
+ 				attributes[0].Value = "1.0"
+ 				node := &Node{
+ 					Type:  DeclarationNode,
+ 					Data:  "xml",
+ 					Attr:  attributes,
+ 					level: 1,
+ 				}
  				AddChild(p.prev, node)
  				p.level = 1
  				p.prev = node
  			}
- 			// https://www.w3.org/TR/xml-names/#scoping-defaulting
  			for _, att := range tok.Attr {
  				if att.Name.Local == "xmlns" {
- 					p.space2prefix[att.Value] = ""
+ 					// https://github.com/antchfx/xmlquery/issues/67
+ 					if prefix, ok := p.space2prefix[att.Value]; !ok || (ok && prefix.level >= p.level) {
+ 						p.space2prefix[att.Value] = &xmlnsPrefix{name: "", level: p.level} // reset empty if exist the default namespace
+ 					}
  				} else if att.Name.Space == "xmlns" {
- 					p.space2prefix[att.Value] = att.Name.Local
+ 					// maybe there are have duplicate NamespaceURL?
+ 					p.space2prefix[att.Value] = &xmlnsPrefix{name: att.Name.Local, level: p.level}
  				}
  			}

- 			if tok.Name.Space != "" {
- 				if _, found := p.space2prefix[tok.Name.Space]; !found {
- 					return nil, errors.New("xmlquery: invalid XML document, namespace is missing")
+ 			if space := tok.Name.Space; space != "" {
+ 				if _, found := p.space2prefix[space]; !found && p.decoder.Strict {
+ 					return nil, fmt.Errorf("xmlquery: invalid XML document, namespace %s is missing", space)
  				}
  			}

- 			for i := 0; i < len(tok.Attr); i++ {
- 				att := &tok.Attr[i]
- 				if prefix, ok := p.space2prefix[att.Name.Space]; ok {
- 					att.Name.Space = prefix
+ 			attributes := make([]Attr, len(tok.Attr))
+ 			for i, att := range tok.Attr {
+ 				name := att.Name
+ 				if prefix, ok := p.space2prefix[name.Space]; ok {
+ 					name.Space = prefix.name
+ 				}
+ 				attributes[i] = Attr{
+ 					Name:         name,
+ 					Value:        att.Value,
+ 					NamespaceURI: att.Name.Space,
  				}
  			}

  			node := &Node{
  				Type:         ElementNode,
  				Data:         tok.Name.Local,
- 				Prefix:       p.space2prefix[tok.Name.Space],
  				NamespaceURI: tok.Name.Space,
- 				Attr:         tok.Attr,
+ 				Attr:         attributes,
  				level:        p.level,
  			}

@@ -132,6 +165,15 @@ func (p *parser) parse() (*Node, error) {
  				}
  				AddSibling(p.prev.Parent, node)
  			}

+ 			if node.NamespaceURI != "" {
+ 				if v, ok := p.space2prefix[node.NamespaceURI]; ok {
+ 					cached := string(p.reader.Cache())
+ 					if strings.HasPrefix(cached, fmt.Sprintf("%s:%s", v.name, node.Data)) || strings.HasPrefix(cached, fmt.Sprintf("<%s:%s", v.name, node.Data)) {
+ 						node.Prefix = v.name
+ 					}
+ 				}
+ 			}
  			// If we're in the streaming mode, we need to remember the node if it is the target node
  			// so that when we finish processing the node's EndElement, we know how/what to return to
  			// caller. Also we need to remove the target node from the tree upon next Read() call so

@@ -149,7 +191,6 @@ func (p *parser) parse() (*Node, error) {
  			}
  			p.prev = node
  			p.level++
- 			p.reader.StartCaching()
  		case xml.EndElement:
  			p.level--
  			// If we're in streaming mode, and we already have a potential streaming

@@ -186,11 +227,10 @@ func (p *parser) parse() (*Node, error) {
  			}
  		case xml.CharData:
- 			p.reader.StopCaching()
  			// First, normalize the cache...
  			cached := strings.ToUpper(string(p.reader.Cache()))
  			nodeType := TextNode
- 			if strings.HasPrefix(cached, "<![CDATA[") {
+ 			if strings.HasPrefix(cached, "<![CDATA[") || strings.HasPrefix(cached, "![CDATA[") {
  				nodeType = CharDataNode
  			}

@@ -205,7 +245,6 @@ func (p *parser) parse() (*Node, error) {
  				}
  				AddSibling(p.prev.Parent, node)
  			}
- 			p.reader.StartCaching()
  		case xml.Comment:
  			node := &Node{Type: CommentNode, Data: string(tok), level: p.level}
  			if p.level == p.prev.level {

@@ -234,9 +273,25 @@ func (p *parser) parse() (*Node, error) {
  				AddSibling(p.prev, node)
  			} else if p.level > p.prev.level {
  				AddChild(p.prev, node)
+ 			} else if p.level < p.prev.level {
+ 				for i := p.prev.level - p.level; i > 1; i-- {
+ 					p.prev = p.prev.Parent
+ 				}
+ 				AddSibling(p.prev.Parent, node)
  			}
  			p.prev = node
  		case xml.Directive:
+ 			node := &Node{Type: NotationNode, Data: string(tok), level: p.level}
+ 			if p.level == p.prev.level {
+ 				AddSibling(p.prev, node)
+ 			} else if p.level > p.prev.level {
+ 				AddChild(p.prev, node)
+ 			} else if p.level < p.prev.level {
+ 				for i := p.prev.level - p.level; i > 1; i-- {
+ 					p.prev = p.prev.Parent
+ 				}
+ 				AddSibling(p.prev.Parent, node)
+ 			}
  		}
  	}
  }
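The per-level xmlnsPrefix bookkeeping above (issue #67) is what keeps nested rebindings of the default namespace apart. An illustrative round-trip, assuming the public Parse and Find helpers:

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	// The inner element rebinds the default namespace at a deeper level;
	// the per-level prefix table keeps the two scopes separate.
	src := `<a xmlns="urn:outer"><b xmlns="urn:inner"><c/></b></a>`
	doc, err := xmlquery.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	for _, n := range xmlquery.Find(doc, "//*") {
		fmt.Printf("%s -> %s\n", n.Data, n.NamespaceURI)
	}
}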
@@ -253,37 +308,43 @@ type StreamParser struct {
(the doc comment for CreateStreamParser is re-wrapped for go doc formatting; the example code is indented under "//" and is otherwise unchanged:)
  // scenarios.
  //
  // Scenario 1: simple case:
  //
  //	xml := `<AAA><BBB>b1</BBB><BBB>b2</BBB></AAA>`
  //	sp, err := CreateStreamParser(strings.NewReader(xml), "/AAA/BBB")
  //	if err != nil {
  //		panic(err)
  //	}
  //	for {
  //		n, err := sp.Read()
  //		if err != nil {
  //			break
  //		}
  //		fmt.Println(n.OutputXML(true))
  //	}
  //
  // Output will be:
  //
  //	<BBB>b1</BBB>
  //	<BBB>b2</BBB>
  //
  // Scenario 2: advanced case:
  //
  //	xml := `<AAA><BBB>b1</BBB><BBB>b2</BBB></AAA>`
  //	sp, err := CreateStreamParser(strings.NewReader(xml), "/AAA/BBB", "/AAA/BBB[. != 'b1']")
  //	if err != nil {
  //		panic(err)
  //	}
  //	for {
  //		n, err := sp.Read()
  //		if err != nil {
  //			break
  //		}
  //		fmt.Println(n.OutputXML(true))
  //	}
  //
  // Output will be:
  //
  //	<BBB>b2</BBB>
  //
  // As the argument names indicate, streamElementXPath should be used for
  // providing xpath query pointing to the target element node only, no extra
@@ -295,6 +356,16 @@ type StreamParser struct {
  // streamElementFilter, if provided, cannot be successfully parsed and compiled
  // into a valid xpath query.
  func CreateStreamParser(r io.Reader, streamElementXPath string, streamElementFilter ...string) (*StreamParser, error) {
+ 	return CreateStreamParserWithOptions(r, ParserOptions{}, streamElementXPath, streamElementFilter...)
+ }
+
+ // CreateStreamParserWithOptions is like CreateStreamParser, but with custom options
+ func CreateStreamParserWithOptions(
+ 	r io.Reader,
+ 	options ParserOptions,
+ 	streamElementXPath string,
+ 	streamElementFilter ...string,
+ ) (*StreamParser, error) {
  	elemXPath, err := getQuery(streamElementXPath)
  	if err != nil {
  		return nil, fmt.Errorf("invalid streamElementXPath '%s', err: %s", streamElementXPath, err.Error())

@@ -306,8 +377,10 @@ func CreateStreamParser(r io.Reader, streamElementXPath string, streamElementFil
  			return nil, fmt.Errorf("invalid streamElementFilter '%s', err: %s", streamElementFilter[0], err.Error())
  		}
  	}
+ 	parser := createParser(r)
+ 	options.apply(parser)
  	sp := &StreamParser{
- 		p: createParser(r),
+ 		p: parser,
  	}
  	sp.p.streamElementXPath = elemXPath
  	sp.p.streamElementFilter = elemFilter

@@ -325,8 +398,15 @@ func (sp *StreamParser) Read() (*Node, error) {
  	// Because this is a streaming read, we need to release/remove last
  	// target node from the node tree to free up memory.
  	if sp.p.streamNode != nil {
+ 		// We need to remove all siblings before the current stream node,
+ 		// because the document may contain unwanted nodes between the target
+ 		// ones (for example new line text node), which would otherwise
+ 		// accumulate as first childs, and slow down the stream over time
+ 		for sp.p.streamNode.PrevSibling != nil {
+ 			RemoveFromTree(sp.p.streamNode.PrevSibling)
+ 		}
+ 		sp.p.prev = sp.p.streamNode.Parent
  		RemoveFromTree(sp.p.streamNode)
- 		sp.p.prev = sp.p.streamNodePrev
  		sp.p.streamNode = nil
  		sp.p.streamNodePrev = nil
  	}
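A usage sketch of the new streaming entry point, combining parser options with the existing Read loop (values are illustrative):

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	src := `<AAA><BBB>b1</BBB><BBB>b2</BBB></AAA>`
	opts := xmlquery.ParserOptions{Decoder: &xmlquery.DecoderOptions{Strict: false}}
	sp, err := xmlquery.CreateStreamParserWithOptions(strings.NewReader(src), opts, "/AAA/BBB")
	if err != nil {
		panic(err)
	}
	for {
		n, err := sp.Read() // returns one matched element at a time
		if err != nil {
			break
		}
		fmt.Println(n.OutputXML(true))
	}
}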
26	vendor/github.com/antchfx/xmlquery/query.go (generated, vendored)

@@ -28,14 +28,9 @@ func (n *Node) SelectAttr(name string) string {
  		}
  		return ""
  	}
- 	var local, space string
- 	local = name
- 	if i := strings.Index(name, ":"); i > 0 {
- 		space = name[:i]
- 		local = name[i+1:]
- 	}
+ 	xmlName := newXMLName(name)
  	for _, attr := range n.Attr {
- 		if attr.Name.Local == local && attr.Name.Space == space {
+ 		if attr.Name == xmlName {
  			return attr.Value
  		}
  	}

@@ -161,7 +156,7 @@ func (x *NodeNavigator) NodeType() xpath.NodeType {
  	switch x.curr.Type {
  	case CommentNode:
  		return xpath.CommentNode
- 	case TextNode, CharDataNode:
+ 	case TextNode, CharDataNode, NotationNode:
  		return xpath.TextNode
  	case DeclarationNode, DocumentNode:
  		return xpath.RootNode

@@ -193,6 +188,9 @@ func (x *NodeNavigator) Prefix() string {
  }

  func (x *NodeNavigator) NamespaceURL() string {
+ 	if x.attr != -1 {
+ 		return x.curr.Attr[x.attr].NamespaceURI
+ 	}
  	return x.curr.NamespaceURI
  }

@@ -272,9 +270,11 @@ func (x *NodeNavigator) MoveToNext() bool {
  	if x.attr != -1 {
  		return false
  	}
- 	if node := x.curr.NextSibling; node != nil {
+ 	for node := x.curr.NextSibling; node != nil; node = x.curr.NextSibling {
  		x.curr = node
- 		return true
+ 		if x.curr.Type != TextNode || strings.TrimSpace(x.curr.Data) != "" {
+ 			return true
+ 		}
  	}
  	return false
  }

@@ -283,9 +283,11 @@ func (x *NodeNavigator) MoveToPrevious() bool {
  	if x.attr != -1 {
  		return false
  	}
- 	if node := x.curr.PrevSibling; node != nil {
+ 	for node := x.curr.PrevSibling; node != nil; node = x.curr.PrevSibling {
  		x.curr = node
- 		return true
+ 		if x.curr.Type != TextNode || strings.TrimSpace(x.curr.Data) != "" {
+ 			return true
+ 		}
  	}
  	return false
  }
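SelectAttr now compares against an xml.Name built by newXMLName, so a "prefix:local" key is matched on both Space and Local. An illustrative call, assuming the predeclared xml prefix resolves as usual:

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(`<doc xml:lang="en" id="d1"/>`))
	if err != nil {
		panic(err)
	}
	n := xmlquery.FindOne(doc, "//doc")
	fmt.Println(n.SelectAttr("xml:lang")) // prefixed key: matched on Space+Local
	fmt.Println(n.SelectAttr("id"))       // plain key: matched on Local only
}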
12	vendor/github.com/antchfx/xpath/.travis.yml (generated, vendored, deleted)

@@ -1,12 +0,0 @@
- language: go
-
- go:
-   - 1.6
-   - 1.9
-   - '1.10'
-
- install:
-   - go get github.com/mattn/goveralls
-
- script:
-   - $HOME/gopath/bin/goveralls -service=travis-ci
144	vendor/github.com/antchfx/xpath/README.md (generated, vendored)

@@ -1,14 +1,13 @@
- XPath
- ====
+ # XPath
  [](https://godoc.org/github.com/antchfx/xpath)
  [](https://coveralls.io/github/antchfx/xpath?branch=master)
- [](https://travis-ci.org/antchfx/xpath)
+ [](https://github.com/antchfx/xpath/actions/workflows/testing.yml)
  [](https://goreportcard.com/report/github.com/antchfx/xpath)

  XPath is Go package provides selecting nodes from XML, HTML or other documents using XPath expression.

- Implementation
- ===
+ # Implementation

@@ -16,8 +15,7 @@ Implementation
  - [jsonquery](https://github.com/antchfx/jsonquery) - an XPath query package for JSON document

- Supported Features
- ===
+ # Supported Features

@@ -57,16 +55,20 @@ Supported Features
  - `(a, b, c)` : Evaluates each of its operands and concatenates the resulting sequences, in order, into a single result sequence
+ - `(a/b)` : Selects all matches nodes as grouping set.

  #### Node Axes

  - `child::*` : The child axis selects children of the current node.
+ - `child::node()`: Selects all the children of the context node.
+ - `child::text()`: Selects all text node children of the context node.
  - `descendant::*` : The descendant axis selects descendants of the current node. It is equivalent to '//'.
  - `descendant-or-self::*` : Selects descendants including the current node.
- - `attribute::*` : Selects attributes of the current element. It is equivalent to @*
+ - `attribute::*` : Selects attributes of the current element. It is equivalent to @\*
  - `following-sibling::*` : Selects nodes after the current node.

@@ -86,27 +88,27 @@ Supported Features
  #### Expressions

  The gxpath supported three types: number, boolean, string.

  - `path` : Selects nodes based on the path.
  - `a = b` : Standard comparisons.
-   * a = b	True if a equals b.
-   * a != b	True if a is not equal to b.
-   * a < b	True if a is less than b.
-   * a <= b	True if a is less than or equal to b.
-   * a > b	True if a is greater than b.
-   * a >= b	True if a is greater than or equal to b.
+   - `a = b` : True if a equals b.
+   - `a != b` : True if a is not equal to b.
+   - `a < b` : True if a is less than b.
+   - `a <= b` : True if a is less than or equal to b.
+   - `a > b` : True if a is greater than b.
+   - `a >= b` : True if a is greater than or equal to b.

  - `a + b` : Arithmetic expressions.
-   * `- a`	Unary minus
-   * a + b	Add
-   * a - b	Substract
-   * a * b	Multiply
-   * a div b	Divide
-   * a mod b	Floating point mod, like Java.
+   - `- a` Unary minus
+   - `a + b` : Addition
+   - `a - b` : Subtraction
+   - `a * b` : Multiplication
+   - `a div b` : Division
+   - `a mod b` : Modulus (division remainder)

  - `a or b` : Boolean `or` operation.

@@ -116,58 +118,50 @@ Supported Features
  - `fun(arg1, ..., argn)` : Function calls:
  (existing rows gain leading pipes and alignment; the two rows marked + are new)
  | Function                | Supported |
  | ----------------------- | --------- |
  | `boolean()`             | ✓         |
  | `ceiling()`             | ✓         |
  | `choose()`              | ✗         |
  | `concat()`              | ✓         |
  | `contains()`            | ✓         |
  | `count()`               | ✓         |
  | `current()`             | ✗         |
  | `document()`            | ✗         |
  | `element-available()`   | ✗         |
  | `ends-with()`           | ✓         |
  | `false()`               | ✓         |
  | `floor()`               | ✓         |
  | `format-number()`       | ✗         |
  | `function-available()`  | ✗         |
  | `generate-id()`         | ✗         |
  | `id()`                  | ✗         |
  | `key()`                 | ✗         |
  | `lang()`                | ✗         |
  | `last()`                | ✓         |
  | `local-name()`          | ✓         |
+ | `lower-case()`[^1]      | ✓         |
  | `matches()`             | ✓         |
  | `name()`                | ✓         |
  | `namespace-uri()`       | ✓         |
  | `normalize-space()`     | ✓         |
  | `not()`                 | ✓         |
  | `number()`              | ✓         |
  | `position()`            | ✓         |
  | `replace()`             | ✓         |
  | `reverse()`             | ✓         |
  | `round()`               | ✓         |
  | `starts-with()`         | ✓         |
  | `string()`              | ✓         |
+ | `string-join()`[^1]     | ✓         |
  | `string-length()`       | ✓         |
  | `substring()`           | ✓         |
  | `substring-after()`     | ✓         |
  | `substring-before()`    | ✓         |
  | `sum()`                 | ✓         |
  | `system-property()`     | ✗         |
  | `translate()`           | ✓         |
  | `true()`                | ✓         |
  | `unparsed-entity-url()` | ✗         |

+ [^1]: XPath-2.0 expression
- Changelogs
- ===
- 2019-03-19
- - optimize XPath `|` operation performance. [#33](https://github.com/antchfx/xpath/issues/33). Tips: suggest split into multiple subquery if you have a lot of `|` operations.
- 2019-01-29
- - improvement `normalize-space` function. [#32](https://github.com/antchfx/xpath/issues/32)
- 2018-12-07
- - supports XPath 2.0 Sequence expressions. [#30](https://github.com/antchfx/xpath/pull/30) by [@minherz](https://github.com/minherz).
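The reworked table also footnotes two XPath 2.0 additions, lower-case() and string-join(). An illustrative query that exercises matches() and lower-case() through xmlquery (document and element names are made up):

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	src := `<books><book id="B1">Go</book><book id="x9">Rust</book></books>`
	doc, err := xmlquery.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	// matches() applies a Go regular expression; lower-case() is one of the
	// XPath 2.0 functions listed in the support table above.
	for _, n := range xmlquery.Find(doc, `//book[matches(lower-case(@id), '^b\d+$')]`) {
		fmt.Println(n.InnerText()) // prints "Go"
	}
}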
417	vendor/github.com/antchfx/xpath/build.go (generated, vendored)

@@ -7,15 +7,39 @@ import (

  type flag int

- const (
- 	noneFlag flag = iota
- 	filterFlag
- )
+ var flagsEnum = struct {
+ 	None      flag
+ 	SmartDesc flag
+ 	PosFilter flag
+ 	Filter    flag
+ 	Condition flag
+ }{
+ 	None:      0,
+ 	SmartDesc: 1,
+ 	PosFilter: 2,
+ 	Filter:    4,
+ 	Condition: 8,
+ }
+
+ type builderProp int
+
+ var builderProps = struct {
+ 	None        builderProp
+ 	PosFilter   builderProp
+ 	HasPosition builderProp
+ 	HasLast     builderProp
+ 	NonFlat     builderProp
+ }{
+ 	None:        0,
+ 	PosFilter:   1,
+ 	HasPosition: 2,
+ 	HasLast:     4,
+ 	NonFlat:     8,
+ }

  // builder provides building an XPath expressions.
  type builder struct {
- 	depth      int
- 	flag       flag
+ 	parseDepth int
  	firstInput query
  }
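flagsEnum and builderProps replace the old iota constants with explicit power-of-two values so that several hints can be OR-ed together and tested or cleared individually. A stand-alone sketch of the same bit-flag pattern:

package main

import "fmt"

type prop int

// Mirrors the builderProps style: distinct bits that can be combined.
var props = struct {
	None        prop
	HasPosition prop
	HasLast     prop
	NonFlat     prop
}{None: 0, HasPosition: 2, HasLast: 4, NonFlat: 8}

func main() {
	p := props.None
	p |= props.HasPosition | props.NonFlat // accumulate hints while building

	fmt.Println(p&props.HasPosition != 0) // true: bit is set
	fmt.Println(p&props.HasLast != 0)     // false: bit is not set

	p &= ^props.NonFlat                // clear one bit, as processFilter does with PosFilter
	fmt.Println(p&props.NonFlat != 0)  // false
}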
@@ -42,8 +66,14 @@ func axisPredicate(root *axisNode) func(NodeNavigator) bool {
  	}
  	nametest := root.LocalName != "" || root.Prefix != ""
  	predicate := func(n NodeNavigator) bool {
- 		if typ == n.NodeType() || typ == allNode || typ == TextNode {
+ 		if typ == n.NodeType() || typ == allNode {
  			if nametest {
+ 				type namespaceURL interface {
+ 					NamespaceURL() string
+ 				}
+ 				if ns, ok := n.(namespaceURL); ok && root.hasNamespaceURI {
+ 					return root.LocalName == n.LocalName() && root.namespaceURI == ns.NamespaceURL()
+ 				}
  				if root.LocalName == n.LocalName() && root.Prefix == n.Prefix() {
  					return true
  				}

@@ -57,23 +87,26 @@ func axisPredicate(root *axisNode) func(NodeNavigator) bool {
  	return predicate
  }

- // processAxisNode processes a query for the XPath axis node.
- func (b *builder) processAxisNode(root *axisNode) (query, error) {
+ // processAxis processes a query for the XPath axis node.
+ func (b *builder) processAxis(root *axisNode, flags flag, props *builderProp) (query, error) {
  	var (
  		err      error
  		qyInput  query
  		qyOutput query
- 		predicate = axisPredicate(root)
  	)
+ 	b.firstInput = nil
+ 	predicate := axisPredicate(root)

  	if root.Input == nil {
  		qyInput = &contextQuery{}
+ 		*props = builderProps.None
  	} else {
+ 		inputFlags := flagsEnum.None
  		if root.AxeType == "child" && (root.Input.Type() == nodeAxis) {
  			if input := root.Input.(*axisNode); input.AxeType == "descendant-or-self" {
  				var qyGrandInput query
  				if input.Input != nil {
- 					qyGrandInput, _ = b.processNode(input.Input)
+ 					qyGrandInput, _ = b.processNode(input.Input, flagsEnum.SmartDesc, props)
  				} else {
  					qyGrandInput = &contextQuery{}
  				}
@@ -88,11 +121,14 @@ func (b *builder) processAxisNode(root *axisNode) (query, error) {
  				}
  				return v
  			}
- 			qyOutput = &descendantQuery{Input: qyGrandInput, Predicate: filter, Self: true}
+ 			qyOutput = &descendantQuery{name: root.LocalName, Input: qyGrandInput, Predicate: filter, Self: false}
+ 			*props |= builderProps.NonFlat
  			return qyOutput, nil
  			}
+ 		} else if ((flags & flagsEnum.Filter) == 0) && (root.AxeType == "descendant" || root.AxeType == "descendant-or-self") {
+ 			inputFlags |= flagsEnum.SmartDesc
  		}
- 		qyInput, err = b.processNode(root.Input)
+ 		qyInput, err = b.processNode(root.Input, inputFlags, props)
  		if err != nil {
  			return nil, err
  		}

@@ -100,11 +136,13 @@ func (b *builder) processAxisNode(root *axisNode) (query, error) {
  	switch root.AxeType {
  	case "ancestor":
- 		qyOutput = &ancestorQuery{Input: qyInput, Predicate: predicate}
+ 		qyOutput = &ancestorQuery{name: root.LocalName, Input: qyInput, Predicate: predicate}
+ 		*props |= builderProps.NonFlat
  	case "ancestor-or-self":
- 		qyOutput = &ancestorQuery{Input: qyInput, Predicate: predicate, Self: true}
+ 		qyOutput = &ancestorQuery{name: root.LocalName, Input: qyInput, Predicate: predicate, Self: true}
+ 		*props |= builderProps.NonFlat
  	case "attribute":
- 		qyOutput = &attributeQuery{Input: qyInput, Predicate: predicate}
+ 		qyOutput = &attributeQuery{name: root.LocalName, Input: qyInput, Predicate: predicate}
  	case "child":
  		filter := func(n NodeNavigator) bool {
  			v := predicate(n)

@@ -118,19 +156,35 @@ func (b *builder) processAxisNode(root *axisNode) (query, error) {
  			}
  			return v
  		}
- 		qyOutput = &childQuery{Input: qyInput, Predicate: filter}
+ 		if (*props & builderProps.NonFlat) == 0 {
+ 			qyOutput = &childQuery{name: root.LocalName, Input: qyInput, Predicate: filter}
+ 		} else {
+ 			qyOutput = &cachedChildQuery{name: root.LocalName, Input: qyInput, Predicate: filter}
+ 		}
  	case "descendant":
- 		qyOutput = &descendantQuery{Input: qyInput, Predicate: predicate}
+ 		if (flags & flagsEnum.SmartDesc) != flagsEnum.None {
+ 			qyOutput = &descendantOverDescendantQuery{name: root.LocalName, Input: qyInput, MatchSelf: false, Predicate: predicate}
+ 		} else {
+ 			qyOutput = &descendantQuery{name: root.LocalName, Input: qyInput, Predicate: predicate}
+ 		}
+ 		*props |= builderProps.NonFlat
  	case "descendant-or-self":
- 		qyOutput = &descendantQuery{Input: qyInput, Predicate: predicate, Self: true}
+ 		if (flags & flagsEnum.SmartDesc) != flagsEnum.None {
+ 			qyOutput = &descendantOverDescendantQuery{name: root.LocalName, Input: qyInput, MatchSelf: true, Predicate: predicate}
+ 		} else {
+ 			qyOutput = &descendantQuery{name: root.LocalName, Input: qyInput, Predicate: predicate, Self: true}
+ 		}
+ 		*props |= builderProps.NonFlat
  	case "following":
  		qyOutput = &followingQuery{Input: qyInput, Predicate: predicate}
+ 		*props |= builderProps.NonFlat
  	case "following-sibling":
  		qyOutput = &followingQuery{Input: qyInput, Predicate: predicate, Sibling: true}
  	case "parent":
  		qyOutput = &parentQuery{Input: qyInput, Predicate: predicate}
  	case "preceding":
  		qyOutput = &precedingQuery{Input: qyInput, Predicate: predicate}
+ 		*props |= builderProps.NonFlat
  	case "preceding-sibling":
  		qyOutput = &precedingQuery{Input: qyInput, Predicate: predicate, Sibling: true}
  	case "self":
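The SmartDesc flag lets chained descendant steps such as //a//b compile into descendantOverDescendantQuery instead of two full descendant scans. Compiling such an expression is enough to reach that path; which query type is chosen stays internal (illustrative):

package main

import (
	"fmt"

	"github.com/antchfx/xpath"
)

func main() {
	// Nested descendant axes: per this diff the inner step can be compiled
	// into a descendant-over-descendant query rather than a flat scan.
	expr, err := xpath.Compile("//library//book[@lang='en']")
	if err != nil {
		panic(err)
	}
	fmt.Println(expr.String()) // prints the original expression back
}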
@@ -144,56 +198,182 @@ func (b *builder) processAxisNode(root *axisNode) (query, error) {
  	return qyOutput, nil
  }

- // processFilterNode builds query for the XPath filter predicate.
- func (b *builder) processFilterNode(root *filterNode) (query, error) {
- 	b.flag |= filterFlag
-
- 	qyInput, err := b.processNode(root.Input)
- 	if err != nil {
- 		return nil, err
- 	}
- 	qyCond, err := b.processNode(root.Condition)
- 	if err != nil {
- 		return nil, err
- 	}
- 	qyOutput := &filterQuery{Input: qyInput, Predicate: qyCond}
- 	return qyOutput, nil
- }
+ func canBeNumber(q query) bool {
+ 	if q.ValueType() != xpathResultType.Any {
+ 		return q.ValueType() == xpathResultType.Number
+ 	}
+ 	return true
+ }
+
+ // processFilterNode builds query for the XPath filter predicate.
+ func (b *builder) processFilter(root *filterNode, flags flag, props *builderProp) (query, error) {
+ 	first := (flags & flagsEnum.Filter) == 0
+
+ 	qyInput, err := b.processNode(root.Input, (flags | flagsEnum.Filter), props)
+ 	if err != nil {
+ 		return nil, err
+ 	}
+ 	firstInput := b.firstInput
+
+ 	var propsCond builderProp
+ 	cond, err := b.processNode(root.Condition, flags, &propsCond)
+ 	if err != nil {
+ 		return nil, err
+ 	}
+
+ 	// Checking whether is number
+ 	if canBeNumber(cond) || ((propsCond & (builderProps.HasPosition | builderProps.HasLast)) != 0) {
+ 		propsCond |= builderProps.HasPosition
+ 		flags |= flagsEnum.PosFilter
+ 	}
+
+ 	if root.Input.Type() != nodeFilter {
+ 		*props &= ^builderProps.PosFilter
+ 	}
+
+ 	if (propsCond & builderProps.HasPosition) != 0 {
+ 		*props |= builderProps.PosFilter
+ 	}
+
+ 	merge := (qyInput.Properties() & queryProps.Merge) != 0
+ 	if (propsCond & builderProps.HasPosition) != builderProps.None {
+ 		if (propsCond & builderProps.HasLast) != 0 {
+ 			// https://github.com/antchfx/xpath/issues/76
+ 			// https://github.com/antchfx/xpath/issues/78
+ 			if qyFunc, ok := cond.(*functionQuery); ok {
+ 				switch qyFunc.Input.(type) {
+ 				case *filterQuery:
+ 					cond = &lastQuery{Input: qyFunc.Input}
+ 				}
+ 			}
+ 		}
+ 	}
+
+ 	if first && firstInput != nil {
+ 		if merge && ((*props & builderProps.PosFilter) != 0) {
+ 			qyInput = &filterQuery{Input: qyInput, Predicate: cond, NoPosition: false}
+
+ 			var (
+ 				rootQuery = &contextQuery{}
+ 				parent    query
+ 			)
+ 			switch axisQuery := firstInput.(type) {
+ 			case *ancestorQuery:
+ 				if _, ok := axisQuery.Input.(*contextQuery); !ok {
+ 					parent = axisQuery.Input
+ 					axisQuery.Input = rootQuery
+ 				}
+ 			// ... identical case arms follow in the source for *attributeQuery,
+ 			// *childQuery, *cachedChildQuery, *descendantQuery, *followingQuery,
+ 			// *precedingQuery, *parentQuery, *selfQuery, *groupQuery and
+ 			// *descendantOverDescendantQuery, each re-rooting the axis input.
+ 			}
+ 			b.firstInput = nil
+ 			if parent != nil {
+ 				return &mergeQuery{Input: parent, Child: qyInput}, nil
+ 			}
+ 			return qyInput, nil
+ 		}
+ 		b.firstInput = nil
+ 	}
+
+ 	resultQuery := &filterQuery{
+ 		Input:      qyInput,
+ 		Predicate:  cond,
+ 		NoPosition: (propsCond & builderProps.HasPosition) == 0,
+ 	}
+ 	return resultQuery, nil
+ }

  // processFunctionNode processes query for the XPath function node.
- func (b *builder) processFunctionNode(root *functionNode) (query, error) {
+ func (b *builder) processFunction(root *functionNode, props *builderProp) (query, error) {
+ 	// Reset builder props
+ 	*props = builderProps.None
+
  	var qyOutput query
  	switch root.FuncName {
+ 	case "lower-case":
+ 		arg, err := b.processNode(root.Args[0], flagsEnum.None, props)
+ 		if err != nil {
+ 			return nil, err
+ 		}
+ 		qyOutput = &functionQuery{Input: arg, Func: lowerCaseFunc}
  	case "starts-with":
- 		arg1, err := b.processNode(root.Args[0])
+ 		arg1, err := b.processNode(root.Args[0], flagsEnum.None, props)
  		if err != nil {
  			return nil, err
  		}
- 		arg2, err := b.processNode(root.Args[1])
+ 		arg2, err := b.processNode(root.Args[1], flagsEnum.None, props)
  		if err != nil {
  			return nil, err
  		}
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: startwithFunc(arg1, arg2)}
+ 		qyOutput = &functionQuery{Func: startwithFunc(arg1, arg2)}
  	case "ends-with":
- 		arg1, err := b.processNode(root.Args[0])
+ 		arg1, err := b.processNode(root.Args[0], flagsEnum.None, props)
- 		arg2, err := b.processNode(root.Args[1])
+ 		arg2, err := b.processNode(root.Args[1], flagsEnum.None, props)
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: endwithFunc(arg1, arg2)}
+ 		qyOutput = &functionQuery{Func: endwithFunc(arg1, arg2)}
  	case "contains":
- 		arg1, err := b.processNode(root.Args[0])
+ 		arg1, err := b.processNode(root.Args[0], flagsEnum.None, props)
- 		arg2, err := b.processNode(root.Args[1])
+ 		arg2, err := b.processNode(root.Args[1], flagsEnum.None, props)
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: containsFunc(arg1, arg2)}
+ 		qyOutput = &functionQuery{Func: containsFunc(arg1, arg2)}
  	case "matches":
  		//matches(string , pattern)
  		if len(root.Args) != 2 {
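The HasPosition/HasLast hints tracked here feed the special handling of positional predicates (issues #76 and #78). An illustrative query whose predicate depends on last():

package main

import (
	"fmt"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(`<r><v>1</v><v>2</v><v>3</v></r>`))
	if err != nil {
		panic(err)
	}
	// Positional predicates rely on the HasPosition/HasLast bookkeeping so
	// that last() is evaluated against the filtered node set.
	last := xmlquery.FindOne(doc, "//v[last()]")
	fmt.Println(last.InnerText()) // 3
}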
@@ -203,13 +383,19 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
  			arg1, arg2 query
  			err        error
  		)
- 		if arg1, err = b.processNode(root.Args[0]); err != nil {
+ 		if arg1, err = b.processNode(root.Args[0], flagsEnum.None, props); err != nil {
  			return nil, err
  		}
- 		if arg2, err = b.processNode(root.Args[1]); err != nil {
+ 		if arg2, err = b.processNode(root.Args[1], flagsEnum.None, props); err != nil {
  			return nil, err
  		}
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: matchesFunc(arg1, arg2)}
+ 		// Issue #92, testing the regular expression before.
+ 		if q, ok := arg2.(*constantQuery); ok {
+ 			if _, err = getRegexp(q.Val.(string)); err != nil {
+ 				return nil, fmt.Errorf("matches() got error. %v", err)
+ 			}
+ 		}
+ 		qyOutput = &functionQuery{Func: matchesFunc(arg1, arg2)}
  	case "substring":
  		//substring( string , start [, length] )

@@ -219,18 +405,18 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
- 		if arg1, err = b.processNode(root.Args[0]); err != nil {
+ 		if arg1, err = b.processNode(root.Args[0], flagsEnum.None, props); err != nil {
- 		if arg2, err = b.processNode(root.Args[1]); err != nil {
+ 		if arg2, err = b.processNode(root.Args[1], flagsEnum.None, props); err != nil {
- 			if arg3, err = b.processNode(root.Args[2]); err != nil {
+ 			if arg3, err = b.processNode(root.Args[2], flagsEnum.None, props); err != nil {
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: substringFunc(arg1, arg2, arg3)}
+ 		qyOutput = &functionQuery{Func: substringFunc(arg1, arg2, arg3)}
  	case "substring-before", "substring-after":
  		//substring-xxxx( haystack, needle )

@@ -240,31 +426,30 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
- 		if arg1, err = b.processNode(root.Args[0]); err != nil {
+ 		if arg1, err = b.processNode(root.Args[0], flagsEnum.None, props); err != nil {
- 		if arg2, err = b.processNode(root.Args[1]); err != nil {
+ 		if arg2, err = b.processNode(root.Args[1], flagsEnum.None, props); err != nil {
  		qyOutput = &functionQuery{
- 			Input: b.firstInput,
  			Func:  substringIndFunc(arg1, arg2, root.FuncName == "substring-after"),
  		}
  	case "string-length":
  		// string-length( [string] )
- 		arg1, err := b.processNode(root.Args[0])
+ 		arg1, err := b.processNode(root.Args[0], flagsEnum.None, props)
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: stringLengthFunc(arg1)}
+ 		qyOutput = &functionQuery{Func: stringLengthFunc(arg1)}
  	case "normalize-space":
- 		argQuery, err := b.processNode(root.Args[0])
+ 		argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)

@@ -278,16 +463,16 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
- 		if arg1, err = b.processNode(root.Args[0]); err != nil {
+ 		if arg1, err = b.processNode(root.Args[0], flagsEnum.None, props); err != nil {
- 		if arg2, err = b.processNode(root.Args[1]); err != nil {
+ 		if arg2, err = b.processNode(root.Args[1], flagsEnum.None, props); err != nil {
- 		if arg3, err = b.processNode(root.Args[2]); err != nil {
+ 		if arg3, err = b.processNode(root.Args[2], flagsEnum.None, props); err != nil {
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: replaceFunc(arg1, arg2, arg3)}
+ 		qyOutput = &functionQuery{Func: replaceFunc(arg1, arg2, arg3)}
  	case "translate":
  		//translate( string , string, string )

@@ -297,21 +482,21 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
- 		if arg1, err = b.processNode(root.Args[0]); err != nil {
+ 		if arg1, err = b.processNode(root.Args[0], flagsEnum.None, props); err != nil {
- 		if arg2, err = b.processNode(root.Args[1]); err != nil {
+ 		if arg2, err = b.processNode(root.Args[1], flagsEnum.None, props); err != nil {
- 		if arg3, err = b.processNode(root.Args[2]); err != nil {
+ 		if arg3, err = b.processNode(root.Args[2], flagsEnum.None, props); err != nil {
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: translateFunc(arg1, arg2, arg3)}
+ 		qyOutput = &functionQuery{Func: translateFunc(arg1, arg2, arg3)}
  	case "not":
- 		argQuery, err := b.processNode(root.Args[0])
+ 		argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)

@@ -325,38 +510,46 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
  		if len(root.Args) == 1 {
- 			arg, err = b.processNode(root.Args[0])
+ 			arg, err = b.processNode(root.Args[0], flagsEnum.None, props)
  		}
  		switch root.FuncName {
  		case "name":
- 			qyOutput = &functionQuery{Input: b.firstInput, Func: nameFunc(arg)}
+ 			qyOutput = &functionQuery{Func: nameFunc(arg)}
  		case "local-name":
- 			qyOutput = &functionQuery{Input: b.firstInput, Func: localNameFunc(arg)}
+ 			qyOutput = &functionQuery{Func: localNameFunc(arg)}
  		case "namespace-uri":
- 			qyOutput = &functionQuery{Input: b.firstInput, Func: namespaceFunc(arg)}
+ 			qyOutput = &functionQuery{Func: namespaceFunc(arg)}
  		}
  	case "true", "false":
  		val := root.FuncName == "true"
  		qyOutput = &functionQuery{
- 			Input: b.firstInput,
  			Func: func(_ query, _ iterator) interface{} {
  				return val
  			},
  		}
  	case "last":
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: lastFunc}
+ 		//switch typ := b.firstInput.(type) {
+ 		//case *groupQuery, *filterQuery:
+ 		// https://github.com/antchfx/xpath/issues/76
+ 		// https://github.com/antchfx/xpath/issues/78
+ 		//qyOutput = &lastQuery{Input: typ}
+ 		//default:
+ 		qyOutput = &functionQuery{Func: lastFunc}
+ 		//}
+ 		*props |= builderProps.HasLast
  	case "position":
- 		qyOutput = &functionQuery{Input: b.firstInput, Func: positionFunc}
+ 		qyOutput = &functionQuery{Func: positionFunc}
+ 		*props |= builderProps.HasPosition
  	case "boolean", "number", "string":
- 		inp := b.firstInput
+ 		var inp query
  		if len(root.Args) > 1 {
  			return nil, fmt.Errorf("xpath: %s function must have at most one parameter", root.FuncName)
  		}
  		if len(root.Args) == 1 {
- 			argQuery, err := b.processNode(root.Args[0])
+ 			argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
  			if err != nil {
  				return nil, err
  			}

@@ -373,13 +566,10 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
  		qyOutput = f
  	case "count":
- 		//if b.firstInput == nil {
- 		//	return nil, errors.New("xpath: expression must evaluate to node-set")
- 		//}
  		if len(root.Args) == 0 {
  			return nil, fmt.Errorf("xpath: count(node-sets) function must with have parameters node-sets")
  		}
- 		argQuery, err := b.processNode(root.Args[0])
|
argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -388,7 +578,7 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
|
|||||||
if len(root.Args) == 0 {
|
if len(root.Args) == 0 {
|
||||||
return nil, fmt.Errorf("xpath: sum(node-sets) function must with have parameters node-sets")
|
return nil, fmt.Errorf("xpath: sum(node-sets) function must with have parameters node-sets")
|
||||||
}
|
}
|
||||||
argQuery, err := b.processNode(root.Args[0])
|
argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -397,7 +587,7 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
|
|||||||
if len(root.Args) == 0 {
|
if len(root.Args) == 0 {
|
||||||
return nil, fmt.Errorf("xpath: ceiling(node-sets) function must with have parameters node-sets")
|
return nil, fmt.Errorf("xpath: ceiling(node-sets) function must with have parameters node-sets")
|
||||||
}
|
}
|
||||||
argQuery, err := b.processNode(root.Args[0])
|
argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -417,41 +607,65 @@ func (b *builder) processFunctionNode(root *functionNode) (query, error) {
|
|||||||
}
|
}
|
||||||
var args []query
|
var args []query
|
||||||
for _, v := range root.Args {
|
for _, v := range root.Args {
|
||||||
q, err := b.processNode(v)
|
q, err := b.processNode(v, flagsEnum.None, props)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
args = append(args, q)
|
args = append(args, q)
|
||||||
}
|
}
|
||||||
qyOutput = &functionQuery{Input: b.firstInput, Func: concatFunc(args...)}
|
qyOutput = &functionQuery{Func: concatFunc(args...)}
|
||||||
case "reverse":
|
case "reverse":
|
||||||
if len(root.Args) == 0 {
|
if len(root.Args) == 0 {
|
||||||
return nil, fmt.Errorf("xpath: reverse(node-sets) function must with have parameters node-sets")
|
return nil, fmt.Errorf("xpath: reverse(node-sets) function must with have parameters node-sets")
|
||||||
}
|
}
|
||||||
argQuery, err := b.processNode(root.Args[0])
|
argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
qyOutput = &transformFunctionQuery{Input: argQuery, Func: reverseFunc}
|
qyOutput = &transformFunctionQuery{Input: argQuery, Func: reverseFunc}
|
||||||
|
case "string-join":
|
||||||
|
if len(root.Args) != 2 {
|
||||||
|
return nil, fmt.Errorf("xpath: string-join(node-sets, separator) function requires node-set and argument")
|
||||||
|
}
|
||||||
|
argQuery, err := b.processNode(root.Args[0], flagsEnum.None, props)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
arg1, err := b.processNode(root.Args[1], flagsEnum.None, props)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
qyOutput = &functionQuery{Input: argQuery, Func: stringJoinFunc(arg1)}
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("not yet support this function %s()", root.FuncName)
|
return nil, fmt.Errorf("not yet support this function %s()", root.FuncName)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if funcQuery, ok := qyOutput.(*functionQuery); ok && funcQuery.Input == nil {
|
||||||
|
funcQuery.Input = b.firstInput
|
||||||
|
}
|
||||||
return qyOutput, nil
|
return qyOutput, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *builder) processOperatorNode(root *operatorNode) (query, error) {
|
func (b *builder) processOperator(root *operatorNode, props *builderProp) (query, error) {
|
||||||
left, err := b.processNode(root.Left)
|
var (
|
||||||
|
leftProp builderProp
|
||||||
|
rightProp builderProp
|
||||||
|
)
|
||||||
|
|
||||||
|
left, err := b.processNode(root.Left, flagsEnum.None, &leftProp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
right, err := b.processNode(root.Right)
|
right, err := b.processNode(root.Right, flagsEnum.None, &rightProp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
*props = leftProp | rightProp
|
||||||
|
|
||||||
var qyOutput query
|
var qyOutput query
|
||||||
switch root.Op {
|
switch root.Op {
|
||||||
case "+", "-", "*", "div", "mod": // Numeric operator
|
case "+", "-", "*", "div", "mod": // Numeric operator
|
||||||
var exprFunc func(interface{}, interface{}) interface{}
|
var exprFunc func(iterator, interface{}, interface{}) interface{}
|
||||||
switch root.Op {
|
switch root.Op {
|
||||||
case "+":
|
case "+":
|
||||||
exprFunc = plusFunc
|
exprFunc = plusFunc
|
||||||
@ -489,38 +703,50 @@ func (b *builder) processOperatorNode(root *operatorNode) (query, error) {
|
|||||||
}
|
}
|
||||||
qyOutput = &booleanQuery{Left: left, Right: right, IsOr: isOr}
|
qyOutput = &booleanQuery{Left: left, Right: right, IsOr: isOr}
|
||||||
case "|":
|
case "|":
|
||||||
|
*props |= builderProps.NonFlat
|
||||||
qyOutput = &unionQuery{Left: left, Right: right}
|
qyOutput = &unionQuery{Left: left, Right: right}
|
||||||
}
|
}
|
||||||
return qyOutput, nil
|
return qyOutput, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *builder) processNode(root node) (q query, err error) {
|
func (b *builder) processNode(root node, flags flag, props *builderProp) (q query, err error) {
|
||||||
if b.depth = b.depth + 1; b.depth > 1024 {
|
if b.parseDepth = b.parseDepth + 1; b.parseDepth > 1024 {
|
||||||
err = errors.New("the xpath expressions is too complex")
|
err = errors.New("the xpath expressions is too complex")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
*props = builderProps.None
|
||||||
switch root.Type() {
|
switch root.Type() {
|
||||||
case nodeConstantOperand:
|
case nodeConstantOperand:
|
||||||
n := root.(*operandNode)
|
n := root.(*operandNode)
|
||||||
q = &constantQuery{Val: n.Val}
|
q = &constantQuery{Val: n.Val}
|
||||||
case nodeRoot:
|
case nodeRoot:
|
||||||
q = &contextQuery{Root: true}
|
q = &absoluteQuery{}
|
||||||
case nodeAxis:
|
case nodeAxis:
|
||||||
q, err = b.processAxisNode(root.(*axisNode))
|
q, err = b.processAxis(root.(*axisNode), flags, props)
|
||||||
b.firstInput = q
|
b.firstInput = q
|
||||||
case nodeFilter:
|
case nodeFilter:
|
||||||
q, err = b.processFilterNode(root.(*filterNode))
|
q, err = b.processFilter(root.(*filterNode), flags, props)
|
||||||
|
b.firstInput = q
|
||||||
case nodeFunction:
|
case nodeFunction:
|
||||||
q, err = b.processFunctionNode(root.(*functionNode))
|
q, err = b.processFunction(root.(*functionNode), props)
|
||||||
case nodeOperator:
|
case nodeOperator:
|
||||||
q, err = b.processOperatorNode(root.(*operatorNode))
|
q, err = b.processOperator(root.(*operatorNode), props)
|
||||||
|
case nodeGroup:
|
||||||
|
q, err = b.processNode(root.(*groupNode).Input, flagsEnum.None, props)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
q = &groupQuery{Input: q}
|
||||||
|
if b.firstInput == nil {
|
||||||
|
b.firstInput = q
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
b.parseDepth--
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// build builds a specified XPath expressions expr.
|
// build builds a specified XPath expressions expr.
|
||||||
func build(expr string) (q query, err error) {
|
func build(expr string, namespaces map[string]string) (q query, err error) {
|
||||||
defer func() {
|
defer func() {
|
||||||
if e := recover(); e != nil {
|
if e := recover(); e != nil {
|
||||||
switch x := e.(type) {
|
switch x := e.(type) {
|
||||||
@ -533,7 +759,8 @@ func build(expr string) (q query, err error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
root := parse(expr)
|
root := parse(expr, namespaces)
|
||||||
b := &builder{}
|
b := &builder{}
|
||||||
return b.processNode(root)
|
props := builderProps.None
|
||||||
|
return b.processNode(root, flagsEnum.None, &props)
|
||||||
}
|
}
|
||||||
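The builder change above threads a *builderProp out-parameter through every processNode call, so a parent expression learns whether one of its sub-expressions used last() or position(). The sketch below is a minimal, self-contained illustration of that propagation pattern only; the prop, node and flag names are invented for the example and are not the vendored types.

package main

import "fmt"

// prop is an illustrative stand-in for the builder's property bitmask.
type prop uint

const (
	hasPosition prop = 1 << iota
	hasLast
)

// node is a toy expression node: fn is "position", "last", or "" for a plain step.
type node struct {
	fn       string
	children []node
}

// process walks the tree and reports, via the out-parameter, which
// properties this subtree exhibits, ORing in whatever the children report.
func process(n node, props *prop) {
	switch n.fn {
	case "position":
		*props |= hasPosition
	case "last":
		*props |= hasLast
	}
	for _, c := range n.children {
		var childProps prop
		process(c, &childProps)
		*props |= childProps // propagate child properties to the parent
	}
}

func main() {
	expr := node{children: []node{{fn: "last"}, {fn: ""}}}
	var p prop
	process(expr, &p)
	fmt.Println(p&hasLast != 0, p&hasPosition != 0) // true false
}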
49 vendor/github.com/antchfx/xpath/func.go (generated, vendored)
@@ -113,7 +113,7 @@ func asNumber(t iterator, o interface{}) float64 {
	case query:
		node := typ.Select(t)
		if node == nil {
-			return float64(0)
+			return math.NaN()
		}
		if v, err := strconv.ParseFloat(node.Value(), 64); err == nil {
			return v
@@ -122,17 +122,19 @@ func asNumber(t iterator, o interface{}) float64 {
		return typ
	case string:
		v, err := strconv.ParseFloat(typ, 64)
-		if err != nil {
-			panic(errors.New("ceiling() function argument type must be a node-set or number"))
+		if err == nil {
+			return v
		}
-		return v
	}
-	return 0
+	return math.NaN()
}

// ceilingFunc is a XPath Node Set functions ceiling(node-set).
func ceilingFunc(q query, t iterator) interface{} {
	val := asNumber(t, functionArgs(q).Evaluate(t))
+	// if math.IsNaN(val) {
+	// 	panic(errors.New("ceiling() function argument type must be a valid number"))
+	// }
	return math.Ceil(val)
}
@@ -612,3 +614,40 @@ func reverseFunc(q query, t iterator) func() NodeNavigator {
		return node
	}
}
+
+// string-join is a XPath Node Set functions string-join(node-set, separator).
+func stringJoinFunc(arg1 query) func(query, iterator) interface{} {
+	return func(q query, t iterator) interface{} {
+		var separator string
+		switch v := functionArgs(arg1).Evaluate(t).(type) {
+		case string:
+			separator = v
+		case query:
+			node := v.Select(t)
+			if node != nil {
+				separator = node.Value()
+			}
+		}
+
+		q = functionArgs(q)
+		test := predicate(q)
+		var parts []string
+		switch v := q.Evaluate(t).(type) {
+		case string:
+			return v
+		case query:
+			for node := v.Select(t); node != nil; node = v.Select(t) {
+				if test(node) {
+					parts = append(parts, node.Value())
+				}
+			}
+		}
+		return strings.Join(parts, separator)
+	}
+}
+
+// lower-case is XPATH function that converts a string to lower case.
+func lowerCaseFunc(q query, t iterator) interface{} {
+	v := functionArgs(q).Evaluate(t)
+	return strings.ToLower(asString(t, v))
+}
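With the asNumber change above, a string that does not parse as a number now converts to NaN instead of panicking, which matches the XPath number() conversion rule. The standalone sketch below shows only that conversion rule; toNumber is a hypothetical helper written for illustration, not the vendored function.

package main

import (
	"fmt"
	"math"
	"strconv"
)

// toNumber sketches the XPath number() rule used above: a parseable string
// becomes its float value, anything else becomes NaN rather than a panic.
func toNumber(s string) float64 {
	if v, err := strconv.ParseFloat(s, 64); err == nil {
		return v
	}
	return math.NaN()
}

func main() {
	fmt.Println(toNumber("3.14"))                // 3.14
	fmt.Println(math.IsNaN(toNumber("abc")))     // true
}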
2 vendor/github.com/antchfx/xpath/func_go110.go (generated, vendored)
@@ -11,6 +11,6 @@ func round(f float64) int {
	return int(math.Round(f))
}

-func newStringBuilder() stringBuilder{
+func newStringBuilder() stringBuilder {
	return &strings.Builder{}
}
3 vendor/github.com/antchfx/xpath/go.mod (generated, vendored)
@@ -1,3 +0,0 @@
-module github.com/antchfx/xpath
-
-go 1.14
115 vendor/github.com/antchfx/xpath/operator.go (generated, vendored)
@@ -1,40 +1,11 @@
package xpath

import (
-	"fmt"
-	"reflect"
	"strconv"
)

// The XPath number operator function list.

-// valueType is a return value type.
-type valueType int
-
-const (
-	booleanType valueType = iota
-	numberType
-	stringType
-	nodeSetType
-)
-
-func getValueType(i interface{}) valueType {
-	v := reflect.ValueOf(i)
-	switch v.Kind() {
-	case reflect.Float64:
-		return numberType
-	case reflect.String:
-		return stringType
-	case reflect.Bool:
-		return booleanType
-	default:
-		if _, ok := i.(query); ok {
-			return nodeSetType
-		}
-	}
-	panic(fmt.Errorf("xpath unknown value type: %v", v.Kind()))
-}
-
type logical func(iterator, string, interface{}, interface{}) bool

var logicalFuncs = [][]logical{
@@ -165,15 +136,28 @@ func cmpNodeSetString(t iterator, op string, m, n interface{}) bool {
func cmpNodeSetNodeSet(t iterator, op string, m, n interface{}) bool {
	a := m.(query)
	b := n.(query)
-	x := a.Select(t)
-	if x == nil {
-		return false
-	}
-	y := b.Select(t)
-	if y == nil {
-		return false
-	}
-	return cmpStringStringF(op, x.Value(), y.Value())
+	for {
+		x := a.Select(t)
+		if x == nil {
+			return false
+		}
+
+		y := b.Select(t)
+		if y == nil {
+			return false
+		}
+
+		for {
+			if cmpStringStringF(op, x.Value(), y.Value()) {
+				return true
+			}
+			if y = b.Select(t); y == nil {
+				break
+			}
+		}
+		// reset
+		b.Evaluate(t)
+	}
}
@@ -215,91 +199,90 @@ func cmpBooleanBoolean(t iterator, op string, m, n interface{}) bool {
// eqFunc is an `=` operator.
func eqFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, "=", m, n)
}
func gtFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, ">", m, n)
}
func geFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, ">=", m, n)
}
func ltFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, "<", m, n)
}
func leFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, "<=", m, n)
}
func neFunc(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, "!=", m, n)
}
var orFunc = func(t iterator, m, n interface{}) interface{} {
-	t1 := getValueType(m)
-	t2 := getValueType(n)
+	t1 := getXPathType(m)
+	t2 := getXPathType(n)
	return logicalFuncs[t1][t2](t, "or", m, n)
}

-func numericExpr(m, n interface{}, cb func(float64, float64) float64) float64 {
-	typ := reflect.TypeOf(float64(0))
-	a := reflect.ValueOf(m).Convert(typ)
-	b := reflect.ValueOf(n).Convert(typ)
-	return cb(a.Float(), b.Float())
+func numericExpr(t iterator, m, n interface{}, cb func(float64, float64) float64) float64 {
+	a := asNumber(t, m)
+	b := asNumber(t, n)
+	return cb(a, b)
}

// plusFunc is an `+` operator.
-var plusFunc = func(m, n interface{}) interface{} {
-	return numericExpr(m, n, func(a, b float64) float64 {
+var plusFunc = func(t iterator, m, n interface{}) interface{} {
+	return numericExpr(t, m, n, func(a, b float64) float64 {
		return a + b
	})
}

// minusFunc is an `-` operator.
-var minusFunc = func(m, n interface{}) interface{} {
-	return numericExpr(m, n, func(a, b float64) float64 {
+var minusFunc = func(t iterator, m, n interface{}) interface{} {
+	return numericExpr(t, m, n, func(a, b float64) float64 {
		return a - b
	})
}

// mulFunc is an `*` operator.
-var mulFunc = func(m, n interface{}) interface{} {
-	return numericExpr(m, n, func(a, b float64) float64 {
+var mulFunc = func(t iterator, m, n interface{}) interface{} {
+	return numericExpr(t, m, n, func(a, b float64) float64 {
		return a * b
	})
}

// divFunc is an `DIV` operator.
-var divFunc = func(m, n interface{}) interface{} {
-	return numericExpr(m, n, func(a, b float64) float64 {
+var divFunc = func(t iterator, m, n interface{}) interface{} {
+	return numericExpr(t, m, n, func(a, b float64) float64 {
		return a / b
	})
}

// modFunc is an 'MOD' operator.
-var modFunc = func(m, n interface{}) interface{} {
-	return numericExpr(m, n, func(a, b float64) float64 {
+var modFunc = func(t iterator, m, n interface{}) interface{} {
+	return numericExpr(t, m, n, func(a, b float64) float64 {
		return float64(int(a) % int(b))
	})
}
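The rewritten cmpNodeSetNodeSet implements the XPath rule that a comparison between two node-sets is true as soon as any pair of values drawn from the two sets satisfies the operator. The sketch below shows the same existential rule over plain string slices; anyPairEqual is an illustrative helper, not the vendored code.

package main

import "fmt"

// anyPairEqual mirrors the node-set = node-set rule: the comparison succeeds
// as soon as any value from the left set matches any value from the right set.
func anyPairEqual(left, right []string) bool {
	for _, x := range left {
		for _, y := range right {
			if x == y {
				return true
			}
		}
	}
	return false
}

func main() {
	fmt.Println(anyPairEqual([]string{"a", "b"}, []string{"c", "b"})) // true
	fmt.Println(anyPairEqual([]string{"a"}, []string{"c"}))           // false
}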
58 vendor/github.com/antchfx/xpath/parse.go (generated, vendored)
@@ -65,11 +65,13 @@ const (
	nodeOperator
	nodeVariable
	nodeConstantOperand
+	nodeGroup
)

type parser struct {
	r *scanner
	d int
+	namespaces map[string]string
}

// newOperatorNode returns new operator node OperatorNode.
@@ -83,8 +85,8 @@ func newOperandNode(v interface{}) node {
}

// newAxisNode returns new axis node AxisNode.
-func newAxisNode(axeTyp, localName, prefix, prop string, n node) node {
-	return &axisNode{
+func newAxisNode(axeTyp, localName, prefix, prop string, n node, opts ...func(p *axisNode)) node {
+	a := axisNode{
		nodeType:  nodeAxis,
		LocalName: localName,
		Prefix:    prefix,
@@ -92,6 +94,10 @@ func newAxisNode(axeTyp, localName, prefix, prop string, n node) node {
		Prop:  prop,
		Input: n,
	}
+	for _, o := range opts {
+		o(&a)
+	}
+	return &a
}

// newVariableNode returns new variable node VariableNode.
@@ -104,6 +110,10 @@ func newFilterNode(n, m node) node {
	return &filterNode{nodeType: nodeFilter, Input: n, Condition: m}
}

+func newGroupNode(n node) node {
+	return &groupNode{nodeType: nodeGroup, Input: n}
+}
+
// newRootNode returns a root node.
func newRootNode(s string) node {
	return &rootNode{nodeType: nodeRoot, slash: s}
@@ -464,7 +474,16 @@ func (p *parser) parseNodeTest(n node, axeTyp string) (opnd node) {
			if p.r.name == "*" {
				name = ""
			}
-			opnd = newAxisNode(axeTyp, name, prefix, "", n)
+			opnd = newAxisNode(axeTyp, name, prefix, "", n, func(a *axisNode) {
+				if prefix != "" && p.namespaces != nil {
+					if ns, ok := p.namespaces[prefix]; ok {
+						a.hasNamespaceURI = true
+						a.namespaceURI = ns
+					} else {
+						panic(fmt.Sprintf("prefix %s not defined.", prefix))
+					}
+				}
+			})
		}
	case itemStar:
		opnd = newAxisNode(axeTyp, "", "", "", n)
@@ -492,6 +511,9 @@ func (p *parser) parsePrimaryExpr(n node) (opnd node) {
	case itemLParens:
		p.next()
		opnd = p.parseExpression(n)
+		if opnd.Type() != nodeConstantOperand {
+			opnd = newGroupNode(opnd)
+		}
		p.skipItem(itemRParens)
	case itemName:
		if p.r.canBeFunc && !isNodeType(p.r) {
@@ -523,11 +545,11 @@ func (p *parser) parseMethod(n node) node {
}

// Parse parsing the XPath express string expr and returns a tree node.
-func parse(expr string) node {
+func parse(expr string, namespaces map[string]string) node {
	r := &scanner{text: expr}
	r.nextChar()
	r.nextItem()
-	p := &parser{r: r}
+	p := &parser{r: r, namespaces: namespaces}
	return p.parseExpression(nil)
}
@@ -555,11 +577,13 @@ func (o *operatorNode) String() string {
// axisNode holds a location step.
type axisNode struct {
	nodeType
	Input     node
	Prop      string // node-test name.[comment|text|processing-instruction|node]
	AxeType   string // name of the axes.[attribute|ancestor|child|....]
	LocalName string // local part name of node.
	Prefix    string // prefix name of node.
+	namespaceURI    string // namespace URI of node
+	hasNamespaceURI bool   // if namespace URI is set (can be "")
}
@@ -587,6 +611,16 @@ func (o *operandNode) String() string {
	return fmt.Sprintf("%v", o.Val)
}

+// groupNode holds a set of node expression
+type groupNode struct {
+	nodeType
+	Input node
+}
+
+func (g *groupNode) String() string {
+	return fmt.Sprintf("%s", g.Input)
+}
+
// filterNode holds a condition filter.
type filterNode struct {
	nodeType
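newAxisNode now takes functional options, which is what lets parseNodeTest attach a resolved namespace URI when the parser was given a prefix-to-URI map. The sketch below reproduces that option pattern with invented types (step, newStep, withNamespaces); it illustrates the technique only and is not the parser's actual code.

package main

import "fmt"

// step is an illustrative stand-in for the parser's axisNode.
type step struct {
	localName       string
	prefix          string
	namespaceURI    string
	hasNamespaceURI bool
}

// newStep builds a step and then applies any functional options to it.
func newStep(local, prefix string, opts ...func(*step)) *step {
	s := step{localName: local, prefix: prefix}
	for _, o := range opts {
		o(&s)
	}
	return &s
}

// withNamespaces resolves the prefix against a prefix->URI map, the way the
// parser wires its option when a namespace map is supplied.
func withNamespaces(ns map[string]string) func(*step) {
	return func(s *step) {
		if s.prefix == "" || ns == nil {
			return
		}
		if uri, ok := ns[s.prefix]; ok {
			s.namespaceURI = uri
			s.hasNamespaceURI = true
		}
	}
}

func main() {
	ns := map[string]string{"dc": "http://purl.org/dc/elements/1.1/"}
	s := newStep("title", "dc", withNamespaces(ns))
	fmt.Println(s.hasNamespaceURI, s.namespaceURI) // true http://purl.org/dc/elements/1.1/
}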
595 vendor/github.com/antchfx/xpath/query.go (generated, vendored)
@@ -7,6 +7,44 @@ import (
	"reflect"
)

+// The return type of the XPath expression.
+type resultType int
+
+var xpathResultType = struct {
+	Boolean resultType
+	// A numeric value
+	Number resultType
+	String resultType
+	// A node collection.
+	NodeSet resultType
+	// Any of the XPath node types.
+	Any resultType
+}{
+	Boolean: 0,
+	Number:  1,
+	String:  2,
+	NodeSet: 3,
+	Any:     4,
+}
+
+type queryProp int
+
+var queryProps = struct {
+	None     queryProp
+	Position queryProp
+	Count    queryProp
+	Cached   queryProp
+	Reverse  queryProp
+	Merge    queryProp
+}{
+	None:     0,
+	Position: 1,
+	Count:    2,
+	Cached:   4,
+	Reverse:  8,
+	Merge:    16,
+}
+
type iterator interface {
	Current() NodeNavigator
}
@@ -20,12 +58,15 @@ type query interface {
	Evaluate(iterator) interface{}

	Clone() query
+
+	// ValueType returns the value type of the current query.
+	ValueType() resultType
+
+	Properties() queryProp
}

// nopQuery is an empty query that always return nil for any query.
-type nopQuery struct {
-	query
-}
+type nopQuery struct{}

func (nopQuery) Select(iterator) NodeNavigator { return nil }
@@ -33,21 +74,23 @@ func (nopQuery) Evaluate(iterator) interface{} { return nil }

func (nopQuery) Clone() query { return nopQuery{} }

+func (nopQuery) ValueType() resultType { return xpathResultType.NodeSet }
+
+func (nopQuery) Properties() queryProp {
+	return queryProps.Merge | queryProps.Position | queryProps.Count | queryProps.Cached
+}
+
// contextQuery is returns current node on the iterator object query.
type contextQuery struct {
	count int
-	Root  bool // Moving to root-level node in the current context iterator.
}

-func (c *contextQuery) Select(t iterator) (n NodeNavigator) {
-	if c.count == 0 {
-		c.count++
-		n = t.Current().Copy()
-		if c.Root {
-			n.MoveToRoot()
-		}
-	}
-	return n
+func (c *contextQuery) Select(t iterator) NodeNavigator {
+	if c.count > 0 {
+		return nil
+	}
+	c.count++
+	return t.Current().Copy()
}
@@ -56,12 +99,53 @@ func (c *contextQuery) Evaluate(iterator) interface{} {
}

func (c *contextQuery) Clone() query {
-	return &contextQuery{count: 0, Root: c.Root}
+	return &contextQuery{}
}
+
+func (c *contextQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (c *contextQuery) Properties() queryProp {
+	return queryProps.Merge | queryProps.Position | queryProps.Count | queryProps.Cached
+}
+
+type absoluteQuery struct {
+	count int
+}
+
+func (a *absoluteQuery) Select(t iterator) (n NodeNavigator) {
+	if a.count > 0 {
+		return
+	}
+	a.count++
+	n = t.Current().Copy()
+	n.MoveToRoot()
+	return
+}
+
+func (a *absoluteQuery) Evaluate(t iterator) interface{} {
+	a.count = 0
+	return a
+}
+
+func (a *absoluteQuery) Clone() query {
+	return &absoluteQuery{}
+}
+
+func (a *absoluteQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (a *absoluteQuery) Properties() queryProp {
+	return queryProps.Merge | queryProps.Position | queryProps.Count | queryProps.Cached
+}
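queryProps above is a bit-flag set: each Properties() implementation ORs together the guarantees it can make, and callers test for a capability with a bitwise AND. A minimal sketch of composing and testing such flags, with illustrative constant names rather than the vendored ones:

package main

import "fmt"

// queryProp here is a stand-alone illustration of the bitmask idea: each
// property occupies its own bit, so sets of properties can be combined
// with | and queried with &.
type queryProp int

const (
	propPosition queryProp = 1 << iota
	propCount
	propCached
	propReverse
	propMerge
)

func main() {
	// A context-like query: ordered (merge), cheap position/count, cacheable.
	props := propMerge | propPosition | propCount | propCached
	fmt.Println(props&propMerge != 0)   // true
	fmt.Println(props&propReverse != 0) // false
}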
// ancestorQuery is an XPath ancestor node query.(ancestor::*|ancestor-self::*)
type ancestorQuery struct {
+	name     string
	iterator func() NodeNavigator
+	table    map[uint64]bool

	Self  bool
	Input query
@@ -69,6 +153,10 @@ type ancestorQuery struct {
}

func (a *ancestorQuery) Select(t iterator) NodeNavigator {
+	if a.table == nil {
+		a.table = make(map[uint64]bool)
+	}
+
	for {
		if a.iterator == nil {
			node := a.Input.Select(t)
@@ -78,24 +166,27 @@ func (a *ancestorQuery) Select(t iterator) NodeNavigator {
			first := true
			node = node.Copy()
			a.iterator = func() NodeNavigator {
-				if first && a.Self {
+				if first {
					first = false
-					if a.Predicate(node) {
+					if a.Self && a.Predicate(node) {
						return node
					}
				}
				for node.MoveToParent() {
-					if !a.Predicate(node) {
-						continue
+					if a.Predicate(node) {
+						return node
					}
-					return node
				}
				return nil
			}
		}

-		if node := a.iterator(); node != nil {
-			return node
+		for node := a.iterator(); node != nil; node = a.iterator() {
+			node_id := getHashCode(node.Copy())
+			if _, ok := a.table[node_id]; !ok {
+				a.table[node_id] = true
+				return node
+			}
		}
		a.iterator = nil
	}
@@ -112,11 +203,20 @@ func (a *ancestorQuery) Test(n NodeNavigator) bool {
}

func (a *ancestorQuery) Clone() query {
-	return &ancestorQuery{Self: a.Self, Input: a.Input.Clone(), Predicate: a.Predicate}
+	return &ancestorQuery{name: a.name, Self: a.Self, Input: a.Input.Clone(), Predicate: a.Predicate}
+}
+
+func (a *ancestorQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (a *ancestorQuery) Properties() queryProp {
+	return queryProps.Position | queryProps.Count | queryProps.Cached | queryProps.Merge | queryProps.Reverse
}

// attributeQuery is an XPath attribute node query.(@*)
type attributeQuery struct {
+	name     string
	iterator func() NodeNavigator

	Input query
@@ -162,11 +262,20 @@ func (a *attributeQuery) Test(n NodeNavigator) bool {
}

func (a *attributeQuery) Clone() query {
-	return &attributeQuery{Input: a.Input.Clone(), Predicate: a.Predicate}
+	return &attributeQuery{name: a.name, Input: a.Input.Clone(), Predicate: a.Predicate}
+}
+
+func (a *attributeQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (a *attributeQuery) Properties() queryProp {
+	return queryProps.Merge
}

// childQuery is an XPath child node query.(child::*)
type childQuery struct {
+	name     string
	posit    int
	iterator func() NodeNavigator

@@ -216,7 +325,15 @@ func (c *childQuery) Test(n NodeNavigator) bool {
}

func (c *childQuery) Clone() query {
-	return &childQuery{Input: c.Input.Clone(), Predicate: c.Predicate}
+	return &childQuery{name: c.name, Input: c.Input.Clone(), Predicate: c.Predicate}
+}
+
+func (c *childQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (c *childQuery) Properties() queryProp {
+	return queryProps.Merge
}

// position returns a position of current NodeNavigator.
@@ -224,8 +341,75 @@ func (c *childQuery) position() int {
	return c.posit
}

+type cachedChildQuery struct {
+	name     string
+	posit    int
+	iterator func() NodeNavigator
+
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+func (c *cachedChildQuery) Select(t iterator) NodeNavigator {
+	for {
+		if c.iterator == nil {
+			c.posit = 0
+			node := c.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			first := true
+			c.iterator = func() NodeNavigator {
+				for {
+					if (first && !node.MoveToChild()) || (!first && !node.MoveToNext()) {
+						return nil
+					}
+					first = false
+					if c.Predicate(node) {
+						return node
+					}
+				}
+			}
+		}
+
+		if node := c.iterator(); node != nil {
+			c.posit++
+			return node
+		}
+		c.iterator = nil
+	}
+}
+
+func (c *cachedChildQuery) Evaluate(t iterator) interface{} {
+	c.Input.Evaluate(t)
+	c.iterator = nil
+	return c
+}
+
+func (c *cachedChildQuery) position() int {
+	return c.posit
+}
+
+func (c *cachedChildQuery) Test(n NodeNavigator) bool {
+	return c.Predicate(n)
+}
+
+func (c *cachedChildQuery) Clone() query {
+	return &childQuery{name: c.name, Input: c.Input.Clone(), Predicate: c.Predicate}
+}
+
+func (c *cachedChildQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (c *cachedChildQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
// descendantQuery is an XPath descendant node query.(descendant::* | descendant-or-self::*)
type descendantQuery struct {
+	name     string
	iterator func() NodeNavigator
	posit    int
	level    int
@@ -245,14 +429,11 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator {
			}
			node = node.Copy()
			d.level = 0
-			positmap := make(map[int]int)
			first := true
			d.iterator = func() NodeNavigator {
-				if first && d.Self {
+				if first {
					first = false
-					if d.Predicate(node) {
-						d.posit = 1
-						positmap[d.level] = 1
+					if d.Self && d.Predicate(node) {
						return node
					}
				}
@@ -260,7 +441,6 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator {
				for {
					if node.MoveToChild() {
						d.level = d.level + 1
-						positmap[d.level] = 0
					} else {
						for {
							if d.level == 0 {
@@ -274,8 +454,6 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator {
						}
					}
					if d.Predicate(node) {
-						positmap[d.level]++
-						d.posit = positmap[d.level]
						return node
					}
				}
@@ -283,6 +461,7 @@ func (d *descendantQuery) Select(t iterator) NodeNavigator {
		}

		if node := d.iterator(); node != nil {
+			d.posit++
			return node
		}
		d.iterator = nil
@@ -309,7 +488,15 @@ func (d *descendantQuery) depth() int {
}

func (d *descendantQuery) Clone() query {
-	return &descendantQuery{Self: d.Self, Input: d.Input.Clone(), Predicate: d.Predicate}
+	return &descendantQuery{name: d.name, Self: d.Self, Input: d.Input.Clone(), Predicate: d.Predicate}
+}
+
+func (d *descendantQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (d *descendantQuery) Properties() queryProp {
+	return queryProps.Merge
}

// followingQuery is an XPath following node query.(following::*|following-sibling::*)
@@ -390,6 +577,14 @@ func (f *followingQuery) Clone() query {
	return &followingQuery{Input: f.Input.Clone(), Sibling: f.Sibling, Predicate: f.Predicate}
}

+func (f *followingQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (f *followingQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
func (f *followingQuery) position() int {
	return f.posit
}
@@ -471,6 +666,14 @@ func (p *precedingQuery) Clone() query {
	return &precedingQuery{Input: p.Input.Clone(), Sibling: p.Sibling, Predicate: p.Predicate}
}

+func (p *precedingQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (p *precedingQuery) Properties() queryProp {
+	return queryProps.Merge | queryProps.Reverse
+}
+
func (p *precedingQuery) position() int {
	return p.posit
}
@@ -503,6 +706,14 @@ func (p *parentQuery) Clone() query {
	return &parentQuery{Input: p.Input.Clone(), Predicate: p.Predicate}
}

+func (p *parentQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (p *parentQuery) Properties() queryProp {
+	return queryProps.Position | queryProps.Count | queryProps.Cached | queryProps.Merge
+}
+
func (p *parentQuery) Test(n NodeNavigator) bool {
	return p.Predicate(n)
}
@@ -539,12 +750,22 @@ func (s *selfQuery) Clone() query {
	return &selfQuery{Input: s.Input.Clone(), Predicate: s.Predicate}
}

+func (s *selfQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (s *selfQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
// filterQuery is an XPath query for predicate filter.
type filterQuery struct {
	Input     query
	Predicate query
-	posit     int
-	positmap  map[int]int
+	NoPosition bool
+
+	posit    int
+	positmap map[int]int
}

func (f *filterQuery) do(t iterator) bool {
@@ -558,8 +779,8 @@ func (f *filterQuery) do(t iterator) bool {
		pt := getNodePosition(f.Input)
		return int(val.Float()) == pt
	default:
-		if q, ok := f.Predicate.(query); ok {
-			return q.Select(t) != nil
+		if f.Predicate != nil {
+			return f.Predicate.Select(t) != nil
		}
	}
	return false
@@ -577,7 +798,7 @@ func (f *filterQuery) Select(t iterator) NodeNavigator {

		node := f.Input.Select(t)
		if node == nil {
-			return node
+			return nil
		}
		node = node.Copy()

@@ -602,6 +823,14 @@ func (f *filterQuery) Clone() query {
	return &filterQuery{Input: f.Input.Clone(), Predicate: f.Predicate.Clone()}
}

+func (f *filterQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (f *filterQuery) Properties() queryProp {
+	return (queryProps.Position | f.Input.Properties()) & (queryProps.Reverse | queryProps.Merge)
+}
+
// functionQuery is an XPath function that returns a computed value for
// the Evaluate call of the current NodeNavigator node. Select call isn't
// applicable for functionQuery.
@@ -624,6 +853,14 @@ func (f *functionQuery) Clone() query {
	return &functionQuery{Input: f.Input.Clone(), Func: f.Func}
}

+func (f *functionQuery) ValueType() resultType {
+	return xpathResultType.Any
+}
+
+func (f *functionQuery) Properties() queryProp {
+	return queryProps.Merge
+}
// transformFunctionQuery diffs from functionQuery where the latter computes a scalar
// value (number,string,boolean) for the current NodeNavigator node while the former
// (transformFunctionQuery) performs a mapping or transform of the current NodeNavigator
@@ -652,6 +889,14 @@ func (f *transformFunctionQuery) Clone() query {
	return &transformFunctionQuery{Input: f.Input.Clone(), Func: f.Func}
}

+func (f *transformFunctionQuery) ValueType() resultType {
+	return xpathResultType.Any
+}
+
+func (f *transformFunctionQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
// constantQuery is an XPath constant operand.
type constantQuery struct {
	Val interface{}
@@ -669,6 +914,49 @@ func (c *constantQuery) Clone() query {
	return c
}

+func (c *constantQuery) ValueType() resultType {
+	return getXPathType(c.Val)
+}
+
+func (c *constantQuery) Properties() queryProp {
+	return queryProps.Position | queryProps.Count | queryProps.Cached | queryProps.Merge
+}
+
+type groupQuery struct {
+	posit int
+
+	Input query
+}
+
+func (g *groupQuery) Select(t iterator) NodeNavigator {
+	node := g.Input.Select(t)
+	if node == nil {
+		return nil
+	}
+	g.posit++
+	return node
+}
+
+func (g *groupQuery) Evaluate(t iterator) interface{} {
+	return g.Input.Evaluate(t)
+}
+
+func (g *groupQuery) Clone() query {
+	return &groupQuery{Input: g.Input.Clone()}
+}
+
+func (g *groupQuery) ValueType() resultType {
+	return g.Input.ValueType()
+}
+
+func (g *groupQuery) Properties() queryProp {
+	return queryProps.Position
+}
+
+func (g *groupQuery) position() int {
+	return g.posit
+}
+
// logicalQuery is an XPath logical expression.
type logicalQuery struct {
	Left, Right query
@@ -699,11 +987,19 @@ func (l *logicalQuery) Clone() query {
	return &logicalQuery{Left: l.Left.Clone(), Right: l.Right.Clone(), Do: l.Do}
}

+func (l *logicalQuery) ValueType() resultType {
+	return xpathResultType.Boolean
+}
+
+func (l *logicalQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
// numericQuery is an XPath numeric operator expression.
type numericQuery struct {
	Left, Right query

-	Do func(interface{}, interface{}) interface{}
+	Do func(iterator, interface{}, interface{}) interface{}
}

func (n *numericQuery) Select(t iterator) NodeNavigator {
@@ -713,13 +1009,21 @@ func (n *numericQuery) Select(t iterator) NodeNavigator {
func (n *numericQuery) Evaluate(t iterator) interface{} {
	m := n.Left.Evaluate(t)
	k := n.Right.Evaluate(t)
-	return n.Do(m, k)
+	return n.Do(t, m, k)
}

func (n *numericQuery) Clone() query {
	return &numericQuery{Left: n.Left.Clone(), Right: n.Right.Clone(), Do: n.Do}
}

+func (n *numericQuery) ValueType() resultType {
+	return xpathResultType.Number
+}
+
+func (n *numericQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
type booleanQuery struct {
	IsOr        bool
	Left, Right query
@@ -791,6 +1095,8 @@ func (b *booleanQuery) Select(t iterator) NodeNavigator {
}

func (b *booleanQuery) Evaluate(t iterator) interface{} {
+	n := t.Current().Copy()
+
	m := b.Left.Evaluate(t)
	left := asBool(t, m)
	if b.IsOr && left {
@@ -798,6 +1104,8 @@ func (b *booleanQuery) Evaluate(t iterator) interface{} {
	} else if !b.IsOr && !left {
		return false
	}
+
+	t.Current().MoveTo(n)
	m = b.Right.Evaluate(t)
	return asBool(t, m)
}
@@ -806,6 +1114,14 @@ func (b *booleanQuery) Clone() query {
	return &booleanQuery{IsOr: b.IsOr, Left: b.Left.Clone(), Right: b.Right.Clone()}
}

+func (b *booleanQuery) ValueType() resultType {
+	return xpathResultType.Boolean
+}
+
+func (b *booleanQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
type unionQuery struct {
	Left, Right query
	iterator    func() NodeNavigator
@@ -863,6 +1179,184 @@ func (u *unionQuery) Clone() query {
	return &unionQuery{Left: u.Left.Clone(), Right: u.Right.Clone()}
}

+func (u *unionQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (u *unionQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
+type lastQuery struct {
+	buffer  []NodeNavigator
+	counted bool
+
+	Input query
+}
+
+func (q *lastQuery) Select(t iterator) NodeNavigator {
+	return nil
+}
+
+func (q *lastQuery) Evaluate(t iterator) interface{} {
+	if !q.counted {
+		for {
+			node := q.Input.Select(t)
+			if node == nil {
+				break
+			}
+			q.buffer = append(q.buffer, node.Copy())
+		}
+		q.counted = true
+	}
+	return float64(len(q.buffer))
+}
+
+func (q *lastQuery) Clone() query {
+	return &lastQuery{Input: q.Input.Clone()}
+}
+
+func (q *lastQuery) ValueType() resultType {
+	return xpathResultType.Number
+}
+
+func (q *lastQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
+type descendantOverDescendantQuery struct {
+	name        string
+	level       int
+	posit       int
+	currentNode NodeNavigator
+
+	Input     query
+	MatchSelf bool
+	Predicate func(NodeNavigator) bool
+}
+
+func (d *descendantOverDescendantQuery) moveToFirstChild() bool {
+	if d.currentNode.MoveToChild() {
+		d.level++
+		return true
+	}
+	return false
+}
+
+func (d *descendantOverDescendantQuery) moveUpUntilNext() bool {
+	for !d.currentNode.MoveToNext() {
+		d.level--
+		if d.level == 0 {
+			return false
+		}
+		d.currentNode.MoveToParent()
+	}
+	return true
+}
+
+func (d *descendantOverDescendantQuery) Select(t iterator) NodeNavigator {
+	for {
+		if d.level == 0 {
+			node := d.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			d.currentNode = node.Copy()
+			d.posit = 0
+			if d.MatchSelf && d.Predicate(d.currentNode) {
+				d.posit = 1
+				return d.currentNode
+			}
+			d.moveToFirstChild()
+		} else if !d.moveUpUntilNext() {
+			continue
+		}
+		for ok := true; ok; ok = d.moveToFirstChild() {
+			if d.Predicate(d.currentNode) {
+				d.posit++
+				return d.currentNode
+			}
+		}
+	}
+}
+
+func (d *descendantOverDescendantQuery) Evaluate(t iterator) interface{} {
+	d.Input.Evaluate(t)
+	return d
+}
+
+func (d *descendantOverDescendantQuery) Clone() query {
+	return &descendantOverDescendantQuery{Input: d.Input.Clone(), Predicate: d.Predicate, MatchSelf: d.MatchSelf}
+}
+
+func (d *descendantOverDescendantQuery) ValueType() resultType {
+	return xpathResultType.NodeSet
+}
+
+func (d *descendantOverDescendantQuery) Properties() queryProp {
+	return queryProps.Merge
+}
+
+func (d *descendantOverDescendantQuery) position() int {
+	return d.posit
+}
+
+type mergeQuery struct {
+	Input query
+	Child query
+
+	iterator func() NodeNavigator
+}
+
+func (m *mergeQuery) Select(t iterator) NodeNavigator {
+	for {
+		if m.iterator == nil {
+			root := m.Input.Select(t)
+			if root == nil {
+				return nil
+			}
+			m.Child.Evaluate(t)
+			root = root.Copy()
+			t.Current().MoveTo(root)
+			var list []NodeNavigator
+			for node := m.Child.Select(t); node != nil; node = m.Child.Select(t) {
|
||||||
|
list = append(list, node.Copy())
|
||||||
|
}
|
||||||
|
i := 0
|
||||||
|
m.iterator = func() NodeNavigator {
|
||||||
|
if i >= len(list) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
result := list[i]
|
||||||
|
i++
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if node := m.iterator(); node != nil {
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
m.iterator = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mergeQuery) Evaluate(t iterator) interface{} {
|
||||||
|
m.Input.Evaluate(t)
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mergeQuery) Clone() query {
|
||||||
|
return &mergeQuery{Input: m.Input.Clone(), Child: m.Child.Clone()}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mergeQuery) ValueType() resultType {
|
||||||
|
return xpathResultType.NodeSet
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mergeQuery) Properties() queryProp {
|
||||||
|
return queryProps.Position | queryProps.Count | queryProps.Cached | queryProps.Merge
|
||||||
|
}
|
||||||
|
|
||||||
func getHashCode(n NodeNavigator) uint64 {
|
func getHashCode(n NodeNavigator) uint64 {
|
||||||
var sb bytes.Buffer
|
var sb bytes.Buffer
|
||||||
switch n.NodeType() {
|
switch n.NodeType() {
|
||||||
@ -898,7 +1392,7 @@ func getHashCode(n NodeNavigator) uint64 {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
h := fnv.New64a()
|
h := fnv.New64a()
|
||||||
h.Write([]byte(sb.String()))
|
h.Write(sb.Bytes())
|
||||||
return h.Sum64()
|
return h.Sum64()
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -921,3 +1415,20 @@ func getNodeDepth(q query) int {
|
|||||||
}
|
}
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getXPathType(i interface{}) resultType {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Float64:
|
||||||
|
return xpathResultType.Number
|
||||||
|
case reflect.String:
|
||||||
|
return xpathResultType.String
|
||||||
|
case reflect.Bool:
|
||||||
|
return xpathResultType.Boolean
|
||||||
|
default:
|
||||||
|
if _, ok := i.(query); ok {
|
||||||
|
return xpathResultType.NodeSet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
panic(fmt.Errorf("xpath unknown value type: %v", v.Kind()))
|
||||||
|
}
|
||||||
|
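A note on the getHashCode change in the hunk above: hashing sb.Bytes() writes the buffer's backing slice directly, whereas []byte(sb.String()) first builds a string and then copies it back into a fresh slice. A minimal standalone sketch of the same pattern (the input string is illustrative, not from this repository):

    package main

    import (
    	"bytes"
    	"fmt"
    	"hash/fnv"
    )

    func main() {
    	var sb bytes.Buffer
    	sb.WriteString("element|attr|value")

    	h := fnv.New64a()
    	h.Write(sb.Bytes()) // reuses the buffer's backing array; []byte(sb.String()) would allocate and copy
    	fmt.Printf("%x\n", h.Sum64())
    }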
29 vendor/github.com/antchfx/xpath/xpath.go generated vendored
@@ -84,13 +84,13 @@ func (t *NodeIterator) Current() NodeNavigator {
 // MoveNext moves Navigator to the next match node.
 func (t *NodeIterator) MoveNext() bool {
 	n := t.query.Select(t)
-	if n != nil {
-		if !t.node.MoveTo(n) {
-			t.node = n.Copy()
-		}
-		return true
+	if n == nil {
+		return false
 	}
-	return false
+	if !t.node.MoveTo(n) {
+		t.node = n.Copy()
+	}
+	return true
 }
 
 // Select selects a node set using the specified XPath expression.
@@ -141,7 +141,7 @@ func Compile(expr string) (*Expr, error) {
 	if expr == "" {
 		return nil, errors.New("expr expression is nil")
 	}
-	qy, err := build(expr)
+	qy, err := build(expr, nil)
 	if err != nil {
 		return nil, err
 	}
@@ -159,3 +159,18 @@ func MustCompile(expr string) *Expr {
 	}
 	return exp
 }
+
+// CompileWithNS compiles an XPath expression string, using given namespaces map.
+func CompileWithNS(expr string, namespaces map[string]string) (*Expr, error) {
+	if expr == "" {
+		return nil, errors.New("expr expression is nil")
+	}
+	qy, err := build(expr, namespaces)
+	if err != nil {
+		return nil, err
+	}
+	if qy == nil {
+		return nil, fmt.Errorf(fmt.Sprintf("undeclared variable in XPath expression: %s", expr))
+	}
+	return &Expr{s: expr, q: qy}, nil
+}
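CompileWithNS, added at the end of the hunk above, is an exported entry point, so callers can bind namespace prefixes when compiling an expression. A hedged usage sketch (the prefix, URI, and element names are invented for illustration):

    package main

    import (
    	"fmt"

    	"github.com/antchfx/xpath"
    )

    func main() {
    	// "ns" is a made-up prefix bound to a made-up URI; the map argument is
    	// what distinguishes CompileWithNS from plain Compile.
    	expr, err := xpath.CompileWithNS("//ns:book/ns:title", map[string]string{
    		"ns": "http://example.com/catalog",
    	})
    	if err != nil {
    		panic(err)
    	}
    	// The compiled *Expr can then be evaluated against any NodeNavigator
    	// implementation via expr.Select or expr.Evaluate.
    	_ = expr
    	fmt.Println("compiled namespace-aware expression")
    }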
5 vendor/github.com/influxdata/influxdb1-client/v2/client.go generated vendored
@@ -556,7 +556,10 @@ func (c *client) Query(q Query) (*Response, error) {
 	if err != nil {
 		return nil, err
 	}
-	defer resp.Body.Close()
+	defer func() {
+		io.Copy(ioutil.Discard, resp.Body) // https://github.com/influxdata/influxdb1-client/issues/58
+		resp.Body.Close()
+	}()
 
 	if err := checkResponse(resp); err != nil {
 		return nil, err
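The client.go change above replaces a bare deferred Close with a drain-then-close, so the HTTP keep-alive connection can be reused even when the response body is not fully read. The same idiom in a generic sketch (the URL is a placeholder; io.Discard plays the role of ioutil.Discard used in the vendored code):

    package main

    import (
    	"fmt"
    	"io"
    	"net/http"
    )

    func fetch(url string) error {
    	resp, err := http.Get(url)
    	if err != nil {
    		return err
    	}
    	defer func() {
    		// Drain unread bytes so the underlying connection can be reused,
    		// then close the body.
    		io.Copy(io.Discard, resp.Body)
    		resp.Body.Close()
    	}()
    	// ... decode resp.Body as needed ...
    	return nil
    }

    func main() {
    	fmt.Println(fetch("http://example.com/"))
    }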
3 vendor/golang.org/x/net/AUTHORS generated vendored
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at http://tip.golang.org/AUTHORS.
3 vendor/golang.org/x/net/CONTRIBUTORS generated vendored
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at http://tip.golang.org/CONTRIBUTORS.
4 vendor/golang.org/x/net/LICENSE generated vendored
@@ -1,4 +1,4 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
+Copyright 2009 The Go Authors.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
@@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer.
 copyright notice, this list of conditions and the following disclaimer
 in the documentation and/or other materials provided with the
 distribution.
-   * Neither the name of Google Inc. nor the names of its
+   * Neither the name of Google LLC nor the names of its
 contributors may be used to endorse or promote products derived from
 this software without specific prior written permission.
 
21 vendor/golang.org/x/net/html/doc.go generated vendored
@@ -92,6 +92,27 @@ example, to process each anchor node in depth-first order:
 The relevant specifications include:
 https://html.spec.whatwg.org/multipage/syntax.html and
 https://html.spec.whatwg.org/multipage/syntax.html#tokenization
+
+# Security Considerations
+
+Care should be taken when parsing and interpreting HTML, whether full documents
+or fragments, within the framework of the HTML specification, especially with
+regard to untrusted inputs.
+
+This package provides both a tokenizer and a parser, which implement the
+tokenization, and tokenization and tree construction stages of the WHATWG HTML
+parsing specification respectively. While the tokenizer parses and normalizes
+individual HTML tokens, only the parser constructs the DOM tree from the
+tokenized HTML, as described in the tree construction stage of the
+specification, dynamically modifying or extending the document's DOM tree.
+
+If your use case requires semantically well-formed HTML documents, as defined by
+the WHATWG specification, the parser should be used rather than the tokenizer.
+
+In security contexts, if trust decisions are being made using the tokenized or
+parsed content, the input must be re-serialized (for instance by using Render or
+Token.String) in order for those trust decisions to hold, as the process of
+tokenization or parsing may alter the content.
 */
 package html // import "golang.org/x/net/html"
 
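The new Security Considerations text above recommends the parser over the tokenizer for well-formed documents and says trust decisions should be made on re-serialized output. A small sketch of that flow with the package's public API (the input HTML is illustrative):

    package main

    import (
    	"fmt"
    	"strings"

    	"golang.org/x/net/html"
    )

    func main() {
    	const untrusted = `<p class="x">hi<!-- note --></p>`

    	// Parse runs tokenization plus tree construction and returns a DOM tree.
    	doc, err := html.Parse(strings.NewReader(untrusted))
    	if err != nil {
    		panic(err)
    	}

    	// Re-serialize before inspecting or storing: parsing may normalize or
    	// alter the content, and the rendered form is what should be trusted.
    	var out strings.Builder
    	if err := html.Render(&out, doc); err != nil {
    		panic(err)
    	}
    	fmt.Println(out.String())
    }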
81 vendor/golang.org/x/net/html/escape.go generated vendored
@@ -193,6 +193,87 @@ func lower(b []byte) []byte {
 	return b
 }
 
+// escapeComment is like func escape but escapes its input bytes less often.
+// Per https://github.com/golang/go/issues/58246 some HTML comments are (1)
+// meaningful and (2) contain angle brackets that we'd like to avoid escaping
+// unless we have to.
+//
+// "We have to" includes the '&' byte, since that introduces other escapes.
+//
+// It also includes those bytes (not including EOF) that would otherwise end
+// the comment. Per the summary table at the bottom of comment_test.go, this is
+// the '>' byte that, per above, we'd like to avoid escaping unless we have to.
+//
+// Studying the summary table (and T actions in its '>' column) closely, we
+// only need to escape in states 43, 44, 49, 51 and 52. State 43 is at the
+// start of the comment data. State 52 is after a '!'. The other three states
+// are after a '-'.
+//
+// Our algorithm is thus to escape every '&' and to escape '>' if and only if:
+//   - The '>' is after a '!' or '-' (in the unescaped data) or
+//   - The '>' is at the start of the comment data (after the opening "<!--").
+func escapeComment(w writer, s string) error {
+	// When modifying this function, consider manually increasing the
+	// maxSuffixLen constant in func TestComments, from 6 to e.g. 9 or more.
+	// That increase should only be temporary, not committed, as it
+	// exponentially affects the test running time.
+
+	if len(s) == 0 {
+		return nil
+	}
+
+	// Loop:
+	// - Grow j such that s[i:j] does not need escaping.
+	// - If s[j] does need escaping, output s[i:j] and an escaped s[j],
+	//   resetting i and j to point past that s[j] byte.
+	i := 0
+	for j := 0; j < len(s); j++ {
+		escaped := ""
+		switch s[j] {
+		case '&':
+			escaped = "&amp;"
+
+		case '>':
+			if j > 0 {
+				if prev := s[j-1]; (prev != '!') && (prev != '-') {
+					continue
+				}
+			}
+			escaped = "&gt;"
+
+		default:
+			continue
+		}
+
+		if i < j {
+			if _, err := w.WriteString(s[i:j]); err != nil {
+				return err
+			}
+		}
+		if _, err := w.WriteString(escaped); err != nil {
+			return err
+		}
+		i = j + 1
+	}
+
+	if i < len(s) {
+		if _, err := w.WriteString(s[i:]); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// escapeCommentString is to EscapeString as escapeComment is to escape.
+func escapeCommentString(s string) string {
+	if strings.IndexAny(s, "&>") == -1 {
+		return s
+	}
+	var buf bytes.Buffer
+	escapeComment(&buf, s)
+	return buf.String()
+}
+
 const escapedChars = "&'<>\"\r"
 
 func escape(w writer, s string) error {
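escapeComment and escapeCommentString are unexported, but their effect is observable through Render (and Token.String, changed further below). A sketch that renders a comment whose data could otherwise close the comment early (the comment text is made up):

    package main

    import (
    	"fmt"
    	"os"

    	"golang.org/x/net/html"
    )

    func main() {
    	// '&' is always escaped; '>' is escaped only where it could terminate
    	// the comment, i.e. at the start of the data or after '!' or '-'.
    	n := &html.Node{
    		Type: html.CommentNode,
    		Data: "--> tail & <b>bold</b>",
    	}
    	if err := html.Render(os.Stdout, n); err != nil {
    		panic(err)
    	}
    	fmt.Println()
    }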
28 vendor/golang.org/x/net/html/parse.go generated vendored
@@ -184,7 +184,7 @@ func (p *parser) clearStackToContext(s scope) {
 	}
 }
 
-// parseGenericRawTextElements implements the generic raw text element parsing
+// parseGenericRawTextElement implements the generic raw text element parsing
 // algorithm defined in 12.2.6.2.
 // https://html.spec.whatwg.org/multipage/parsing.html#parsing-elements-that-contain-only-text
 // TODO: Since both RAWTEXT and RCDATA states are treated as tokenizer's part
@@ -663,6 +663,24 @@ func inHeadIM(p *parser) bool {
 		// Ignore the token.
 		return true
 	case a.Template:
+		// TODO: remove this divergence from the HTML5 spec.
+		//
+		// We don't handle all of the corner cases when mixing foreign
+		// content (i.e. <math> or <svg>) with <template>. Without this
+		// early return, we can get into an infinite loop, possibly because
+		// of the "TODO... further divergence" a little below.
+		//
+		// As a workaround, if we are mixing foreign content and templates,
+		// just ignore the rest of the HTML. Foreign content is rare and a
+		// relatively old HTML feature. Templates are also rare and a
+		// relatively new HTML feature. Their combination is very rare.
+		for _, e := range p.oe {
+			if e.Namespace != "" {
+				p.im = ignoreTheRemainingTokens
+				return true
+			}
+		}
+
 		p.addElement()
 		p.afe = append(p.afe, &scopeMarker)
 		p.framesetOK = false
@@ -683,7 +701,7 @@ func inHeadIM(p *parser) bool {
 		if !p.oe.contains(a.Template) {
 			return true
 		}
-		// TODO: remove this divergence from the HTML5 spec.
+		// TODO: remove this further divergence from the HTML5 spec.
 		//
 		// See https://bugs.chromium.org/p/chromium/issues/detail?id=829668
 		p.generateImpliedEndTags()
@@ -716,7 +734,7 @@ func inHeadIM(p *parser) bool {
 	return false
 }
 
-// 12.2.6.4.5.
+// Section 12.2.6.4.5.
 func inHeadNoscriptIM(p *parser) bool {
 	switch p.tok.Type {
 	case DoctypeToken:
@@ -2127,6 +2145,10 @@ func afterAfterFramesetIM(p *parser) bool {
 	return true
 }
 
+func ignoreTheRemainingTokens(p *parser) bool {
+	return true
+}
+
 const whitespaceOrNUL = whitespace + "\x00"
 
 // Section 12.2.6.5
32 vendor/golang.org/x/net/html/render.go generated vendored
@@ -85,7 +85,7 @@ func render1(w writer, n *Node) error {
 		if _, err := w.WriteString("<!--"); err != nil {
 			return err
 		}
-		if _, err := w.WriteString(n.Data); err != nil {
+		if err := escapeComment(w, n.Data); err != nil {
 			return err
 		}
 		if _, err := w.WriteString("-->"); err != nil {
@@ -96,7 +96,7 @@ func render1(w writer, n *Node) error {
 		if _, err := w.WriteString("<!DOCTYPE "); err != nil {
 			return err
 		}
-		if _, err := w.WriteString(n.Data); err != nil {
+		if err := escape(w, n.Data); err != nil {
 			return err
 		}
 		if n.Attr != nil {
@@ -194,9 +194,8 @@ func render1(w writer, n *Node) error {
 		}
 	}
 
-	// Render any child nodes.
-	switch n.Data {
-	case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
+	// Render any child nodes
+	if childTextNodesAreLiteral(n) {
 		for c := n.FirstChild; c != nil; c = c.NextSibling {
 			if c.Type == TextNode {
 				if _, err := w.WriteString(c.Data); err != nil {
@@ -213,7 +212,7 @@ func render1(w writer, n *Node) error {
 			// last element in the file, with no closing tag.
 			return plaintextAbort
 		}
-	default:
+	} else {
 		for c := n.FirstChild; c != nil; c = c.NextSibling {
 			if err := render1(w, c); err != nil {
 				return err
@@ -231,6 +230,27 @@ func render1(w writer, n *Node) error {
 	return w.WriteByte('>')
 }
 
+func childTextNodesAreLiteral(n *Node) bool {
+	// Per WHATWG HTML 13.3, if the parent of the current node is a style,
+	// script, xmp, iframe, noembed, noframes, or plaintext element, and the
+	// current node is a text node, append the value of the node's data
+	// literally. The specification is not explicit about it, but we only
+	// enforce this if we are in the HTML namespace (i.e. when the namespace is
+	// "").
+	// NOTE: we also always include noscript elements, although the
+	// specification states that they should only be rendered as such if
+	// scripting is enabled for the node (which is not something we track).
+	if n.Namespace != "" {
+		return false
+	}
+	switch n.Data {
+	case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
+		return true
+	default:
+		return false
+	}
+}
+
 // writeQuoted writes s to w surrounded by quotes. Normally it will use double
 // quotes, but if s contains a double quote, it will use single quotes.
 // It is used for writing the identifiers in a doctype declaration.
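childTextNodesAreLiteral, added above, controls whether a text child is written verbatim or escaped when the tree is re-rendered. A sketch showing the two cases side by side (the input markup is illustrative):

    package main

    import (
    	"os"
    	"strings"

    	"golang.org/x/net/html"
    )

    func main() {
    	// Text under <p> is escaped on output; text under <script>, one of the
    	// raw-text elements listed in childTextNodesAreLiteral, is written as-is.
    	const src = `<p>1 < 2</p><script>if (1 < 2) { go(); }</script>`

    	doc, err := html.Parse(strings.NewReader(src))
    	if err != nil {
    		panic(err)
    	}
    	if err := html.Render(os.Stdout, doc); err != nil {
    		panic(err)
    	}
    }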
80 vendor/golang.org/x/net/html/token.go generated vendored
@@ -110,9 +110,9 @@ func (t Token) String() string {
 	case SelfClosingTagToken:
 		return "<" + t.tagString() + "/>"
 	case CommentToken:
-		return "<!--" + t.Data + "-->"
+		return "<!--" + escapeCommentString(t.Data) + "-->"
 	case DoctypeToken:
-		return "<!DOCTYPE " + t.Data + ">"
+		return "<!DOCTYPE " + EscapeString(t.Data) + ">"
 	}
 	return "Invalid(" + strconv.Itoa(int(t.Type)) + ")"
 }
@@ -598,6 +598,11 @@ scriptDataDoubleEscapeEnd:
 // readComment reads the next comment token starting with "<!--". The opening
 // "<!--" has already been consumed.
 func (z *Tokenizer) readComment() {
+	// When modifying this function, consider manually increasing the
+	// maxSuffixLen constant in func TestComments, from 6 to e.g. 9 or more.
+	// That increase should only be temporary, not committed, as it
+	// exponentially affects the test running time.
+
 	z.data.start = z.raw.end
 	defer func() {
 		if z.data.end < z.data.start {
@@ -605,14 +610,13 @@ func (z *Tokenizer) readComment() {
 			z.data.end = z.data.start
 		}
 	}()
-	for dashCount := 2; ; {
+
+	var dashCount int
+	beginning := true
+	for {
 		c := z.readByte()
 		if z.err != nil {
-			// Ignore up to two dashes at EOF.
-			if dashCount > 2 {
-				dashCount = 2
-			}
-			z.data.end = z.raw.end - dashCount
+			z.data.end = z.calculateAbruptCommentDataEnd()
 			return
 		}
 		switch c {
@@ -620,7 +624,7 @@ func (z *Tokenizer) readComment() {
 			dashCount++
 			continue
 		case '>':
-			if dashCount >= 2 {
+			if dashCount >= 2 || beginning {
 				z.data.end = z.raw.end - len("-->")
 				return
 			}
@@ -628,19 +632,52 @@ func (z *Tokenizer) readComment() {
 			if dashCount >= 2 {
 				c = z.readByte()
 				if z.err != nil {
-					z.data.end = z.raw.end
+					z.data.end = z.calculateAbruptCommentDataEnd()
 					return
-				}
-				if c == '>' {
+				} else if c == '>' {
 					z.data.end = z.raw.end - len("--!>")
 					return
+				} else if c == '-' {
+					dashCount = 1
+					beginning = false
+					continue
 				}
 			}
 		}
 		dashCount = 0
+		beginning = false
 	}
 }
 
+func (z *Tokenizer) calculateAbruptCommentDataEnd() int {
+	raw := z.Raw()
+	const prefixLen = len("<!--")
+	if len(raw) >= prefixLen {
+		raw = raw[prefixLen:]
+		if hasSuffix(raw, "--!") {
+			return z.raw.end - 3
+		} else if hasSuffix(raw, "--") {
+			return z.raw.end - 2
+		} else if hasSuffix(raw, "-") {
+			return z.raw.end - 1
+		}
+	}
+	return z.raw.end
+}
+
+func hasSuffix(b []byte, suffix string) bool {
+	if len(b) < len(suffix) {
+		return false
+	}
+	b = b[len(b)-len(suffix):]
+	for i := range b {
+		if b[i] != suffix[i] {
+			return false
+		}
+	}
+	return true
+}
+
 // readUntilCloseAngle reads until the next ">".
 func (z *Tokenizer) readUntilCloseAngle() {
 	z.data.start = z.raw.end
@@ -873,10 +910,16 @@ func (z *Tokenizer) readTagAttrKey() {
 			return
 		}
 		switch c {
-		case ' ', '\n', '\r', '\t', '\f', '/':
-			z.pendingAttr[0].end = z.raw.end - 1
-			return
-		case '=', '>':
+		case '=':
+			if z.pendingAttr[0].start+1 == z.raw.end {
+				// WHATWG 13.2.5.32, if we see an equals sign before the attribute name
+				// begins, we treat it as a character in the attribute name and continue.
+				continue
+			}
+			fallthrough
+		case ' ', '\n', '\r', '\t', '\f', '/', '>':
+			// WHATWG 13.2.5.33 Attribute name state
+			// We need to reconsume the char in the after attribute name state to support the / character
 			z.raw.end--
 			z.pendingAttr[0].end = z.raw.end
 			return
@@ -895,6 +938,11 @@ func (z *Tokenizer) readTagAttrVal() {
 	if z.err != nil {
 		return
 	}
+	if c == '/' {
+		// WHATWG 13.2.5.34 After attribute name state
+		// U+002F SOLIDUS (/) - Switch to the self-closing start tag state.
+		return
+	}
 	if c != '=' {
 		z.raw.end--
 		return
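On the tokenizer side, the same comment data flows through Token.String (now via escapeCommentString) instead of the renderer. A sketch that walks tokens and re-serializes each one (the input is illustrative):

    package main

    import (
    	"fmt"
    	"io"
    	"strings"

    	"golang.org/x/net/html"
    )

    func main() {
    	z := html.NewTokenizer(strings.NewReader(`<!--a--b--><p id=x>text</p>`))
    	for {
    		tt := z.Next()
    		if tt == html.ErrorToken {
    			if z.Err() == io.EOF {
    				return
    			}
    			panic(z.Err())
    		}
    		// Token.String re-serializes the token; comment and doctype data
    		// are escaped here the same way the renderer escapes them.
    		fmt.Println(z.Token().String())
    	}
    }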
3 vendor/golang.org/x/text/AUTHORS generated vendored
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at http://tip.golang.org/AUTHORS.
3 vendor/golang.org/x/text/CONTRIBUTORS generated vendored
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at http://tip.golang.org/CONTRIBUTORS.
4 vendor/golang.org/x/text/LICENSE generated vendored
@@ -1,4 +1,4 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
+Copyright 2009 The Go Authors.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
@@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer.
 copyright notice, this list of conditions and the following disclaimer
 in the documentation and/or other materials provided with the
 distribution.
-   * Neither the name of Google Inc. nor the names of its
+   * Neither the name of Google LLC nor the names of its
 contributors may be used to endorse or promote products derived from
 this software without specific prior written permission.
 
9 vendor/golang.org/x/text/encoding/htmlindex/tables.go generated vendored
@@ -93,8 +93,11 @@ var canonical = [numEncodings]string{
 
 var nameMap = map[string]htmlEncoding{
 	"unicode-1-1-utf-8":   utf8,
+	"unicode11utf8":       utf8,
+	"unicode20utf8":       utf8,
 	"utf-8":               utf8,
 	"utf8":                utf8,
+	"x-unicode20utf8":     utf8,
 	"866":                 ibm866,
 	"cp866":               ibm866,
 	"csibm866":            ibm866,
@@ -307,7 +310,13 @@ var nameMap = map[string]htmlEncoding{
 	"iso-2022-cn-ext":     replacement,
 	"iso-2022-kr":         replacement,
 	"replacement":         replacement,
+	"unicodefffe":         utf16be,
 	"utf-16be":            utf16be,
+	"csunicode":           utf16le,
+	"iso-10646-ucs-2":     utf16le,
+	"ucs-2":               utf16le,
+	"unicode":             utf16le,
+	"unicodefeff":         utf16le,
 	"utf-16":              utf16le,
 	"utf-16le":            utf16le,
 	"x-user-defined":      xUserDefined,
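The nameMap additions above are reachable through the package's lookup functions. A sketch resolving one of the newly added labels (the exact canonical name printed depends on the x/text version):

    package main

    import (
    	"fmt"

    	"golang.org/x/text/encoding/htmlindex"
    )

    func main() {
    	// "unicode" is one of the labels the table now maps to UTF-16LE.
    	enc, err := htmlindex.Get("unicode")
    	if err != nil {
    		panic(err)
    	}
    	name, err := htmlindex.Name(enc)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(name) // expected to name a UTF-16 little-endian encoding
    }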
8 vendor/golang.org/x/text/encoding/internal/identifier/mib.go generated vendored
@@ -905,6 +905,14 @@ const (
 	// https://www.unicode.org/notes/tn6/
 	BOCU1 MIB = 1020
 
+	// UTF7IMAP is the MIB identifier with IANA name UTF-7-IMAP.
+	//
+	// Note: This charset is used to encode Unicode in IMAP mailbox names;
+	// see section 5.1.3 of rfc3501 . It should never be used
+	// outside this context. A name has been assigned so that charset processing
+	// implementations can refer to it in a consistent way.
+	UTF7IMAP MIB = 1021
+
 	// Windows30Latin1 is the MIB identifier with IANA name ISO-8859-1-Windows-3.0-Latin-1.
 	//
 	// Extended ISO 8859-1 Latin-1 for Windows 3.0.
2 vendor/golang.org/x/text/encoding/internal/internal.go generated vendored
@@ -64,7 +64,7 @@ func (e FuncEncoding) NewEncoder() *encoding.Encoder {
 // byte.
 type RepertoireError byte
 
-// Error implements the error interrface.
+// Error implements the error interface.
 func (r RepertoireError) Error() string {
 	return "encoding: rune not supported by encoding."
 }
6 vendor/golang.org/x/text/encoding/simplifiedchinese/gbk.go generated vendored
@@ -55,6 +55,8 @@ loop:
 		// Microsoft's Code Page 936 extends GBK 1.0 to encode the euro sign U+20AC
 		// as 0x80. The HTML5 specification at http://encoding.spec.whatwg.org/#gbk
 		// says to treat "gbk" as Code Page 936.
+		// GBK’s decoder is gb18030’s decoder. https://encoding.spec.whatwg.org/#gbk-decoder
+		// If byte is 0x80, return code point U+20AC. https://encoding.spec.whatwg.org/#gb18030-decoder
 		case c0 == 0x80:
 			r, size = '€', 1
 
@@ -180,7 +182,9 @@ func (e gbkEncoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err
 		// Microsoft's Code Page 936 extends GBK 1.0 to encode the euro sign U+20AC
 		// as 0x80. The HTML5 specification at http://encoding.spec.whatwg.org/#gbk
 		// says to treat "gbk" as Code Page 936.
-		if r == '€' {
+		// GBK’s encoder is gb18030’s encoder with its _is GBK_ set to true. https://encoding.spec.whatwg.org/#gbk-encoder
+		// If _is GBK_ is true and code point is U+20AC, return byte 0x80. https://encoding.spec.whatwg.org/#gb18030-encoder
+		if !e.gb18030 && r == '€' {
 			r = 0x80
 			goto write1
 		}
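The gbk.go change keeps the Code Page 936 shortcut (U+20AC encoded as the single byte 0x80) for plain GBK while letting GB18030 use its own mapping. A sketch with the exported encoders; the byte values in the comments are expectations taken from the diff's own comments rather than guarantees:

    package main

    import (
    	"fmt"

    	"golang.org/x/text/encoding/simplifiedchinese"
    )

    func main() {
    	gbk, err := simplifiedchinese.GBK.NewEncoder().Bytes([]byte("€"))
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("GBK:     % x\n", gbk) // expected: the single byte 0x80

    	gb18030, err := simplifiedchinese.GB18030.NewEncoder().Bytes([]byte("€"))
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("GB18030: % x\n", gb18030) // expected: GB18030's own multi-byte form
    }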
2 vendor/golang.org/x/text/internal/language/compact/language.go generated vendored
@@ -118,7 +118,7 @@ func (t Tag) Parent() Tag {
 	return Tag{language: lang, locale: lang}
 }
 
-// returns token t and the rest of the string.
+// nextToken returns token t and the rest of the string.
 func nextToken(s string) (t, tail string) {
 	p := strings.Index(s[1:], "-")
 	if p == -1 {
356
vendor/golang.org/x/text/internal/language/compact/tables.go
generated
vendored
356
vendor/golang.org/x/text/internal/language/compact/tables.go
generated
vendored
@ -790,226 +790,226 @@ const (
|
|||||||
|
|
||||||
var coreTags = []language.CompactCoreInfo{ // 773 elements
|
var coreTags = []language.CompactCoreInfo{ // 773 elements
|
||||||
// Entry 0 - 1F
|
// Entry 0 - 1F
|
||||||
0x00000000, 0x01600000, 0x016000d2, 0x01600161,
|
0x00000000, 0x01600000, 0x016000d3, 0x01600162,
|
||||||
0x01c00000, 0x01c00052, 0x02100000, 0x02100080,
|
0x01c00000, 0x01c00052, 0x02100000, 0x02100081,
|
||||||
0x02700000, 0x0270006f, 0x03a00000, 0x03a00001,
|
0x02700000, 0x02700070, 0x03a00000, 0x03a00001,
|
||||||
0x03a00023, 0x03a00039, 0x03a00062, 0x03a00067,
|
0x03a00023, 0x03a00039, 0x03a00063, 0x03a00068,
|
||||||
0x03a0006b, 0x03a0006c, 0x03a0006d, 0x03a00097,
|
0x03a0006c, 0x03a0006d, 0x03a0006e, 0x03a00098,
|
||||||
0x03a0009b, 0x03a000a1, 0x03a000a8, 0x03a000ac,
|
0x03a0009c, 0x03a000a2, 0x03a000a9, 0x03a000ad,
|
||||||
0x03a000b0, 0x03a000b9, 0x03a000ba, 0x03a000c9,
|
0x03a000b1, 0x03a000ba, 0x03a000bb, 0x03a000ca,
|
||||||
0x03a000e1, 0x03a000ed, 0x03a000f3, 0x03a00108,
|
0x03a000e2, 0x03a000ee, 0x03a000f4, 0x03a00109,
|
||||||
// Entry 20 - 3F
|
// Entry 20 - 3F
|
||||||
0x03a0010b, 0x03a00115, 0x03a00117, 0x03a0011c,
|
0x03a0010c, 0x03a00116, 0x03a00118, 0x03a0011d,
|
||||||
0x03a00120, 0x03a00128, 0x03a0015e, 0x04000000,
|
0x03a00121, 0x03a00129, 0x03a0015f, 0x04000000,
|
||||||
0x04300000, 0x04300099, 0x04400000, 0x0440012f,
|
0x04300000, 0x0430009a, 0x04400000, 0x04400130,
|
||||||
0x04800000, 0x0480006e, 0x05800000, 0x05820000,
|
0x04800000, 0x0480006f, 0x05800000, 0x05820000,
|
||||||
0x05820032, 0x0585a000, 0x0585a032, 0x05e00000,
|
0x05820032, 0x0585b000, 0x0585b032, 0x05e00000,
|
||||||
0x05e00052, 0x07100000, 0x07100047, 0x07500000,
|
0x05e00052, 0x07100000, 0x07100047, 0x07500000,
|
||||||
0x07500162, 0x07900000, 0x0790012f, 0x07e00000,
|
0x07500163, 0x07900000, 0x07900130, 0x07e00000,
|
||||||
0x07e00038, 0x08200000, 0x0a000000, 0x0a0000c3,
|
0x07e00038, 0x08200000, 0x0a000000, 0x0a0000c4,
|
||||||
// Entry 40 - 5F
|
// Entry 40 - 5F
|
||||||
0x0a500000, 0x0a500035, 0x0a500099, 0x0a900000,
|
0x0a500000, 0x0a500035, 0x0a50009a, 0x0a900000,
|
||||||
0x0a900053, 0x0a900099, 0x0b200000, 0x0b200078,
|
0x0a900053, 0x0a90009a, 0x0b200000, 0x0b200079,
|
||||||
0x0b500000, 0x0b500099, 0x0b700000, 0x0b720000,
|
0x0b500000, 0x0b50009a, 0x0b700000, 0x0b720000,
|
||||||
0x0b720033, 0x0b75a000, 0x0b75a033, 0x0d700000,
|
0x0b720033, 0x0b75b000, 0x0b75b033, 0x0d700000,
|
||||||
0x0d700022, 0x0d70006e, 0x0d700078, 0x0d70009e,
|
0x0d700022, 0x0d70006f, 0x0d700079, 0x0d70009f,
|
||||||
0x0db00000, 0x0db00035, 0x0db00099, 0x0dc00000,
|
0x0db00000, 0x0db00035, 0x0db0009a, 0x0dc00000,
|
||||||
0x0dc00106, 0x0df00000, 0x0df00131, 0x0e500000,
|
0x0dc00107, 0x0df00000, 0x0df00132, 0x0e500000,
|
||||||
0x0e500135, 0x0e900000, 0x0e90009b, 0x0e90009c,
|
0x0e500136, 0x0e900000, 0x0e90009c, 0x0e90009d,
|
||||||
// Entry 60 - 7F
|
// Entry 60 - 7F
|
||||||
0x0fa00000, 0x0fa0005e, 0x0fe00000, 0x0fe00106,
|
0x0fa00000, 0x0fa0005f, 0x0fe00000, 0x0fe00107,
|
||||||
0x10000000, 0x1000007b, 0x10100000, 0x10100063,
|
0x10000000, 0x1000007c, 0x10100000, 0x10100064,
|
||||||
0x10100082, 0x10800000, 0x108000a4, 0x10d00000,
|
0x10100083, 0x10800000, 0x108000a5, 0x10d00000,
|
||||||
0x10d0002e, 0x10d00036, 0x10d0004e, 0x10d00060,
|
0x10d0002e, 0x10d00036, 0x10d0004e, 0x10d00061,
|
||||||
0x10d0009e, 0x10d000b2, 0x10d000b7, 0x11700000,
|
0x10d0009f, 0x10d000b3, 0x10d000b8, 0x11700000,
|
||||||
0x117000d4, 0x11f00000, 0x11f00060, 0x12400000,
|
0x117000d5, 0x11f00000, 0x11f00061, 0x12400000,
|
||||||
0x12400052, 0x12800000, 0x12b00000, 0x12b00114,
|
0x12400052, 0x12800000, 0x12b00000, 0x12b00115,
|
||||||
0x12d00000, 0x12d00043, 0x12f00000, 0x12f000a4,
|
0x12d00000, 0x12d00043, 0x12f00000, 0x12f000a5,
|
||||||
// Entry 80 - 9F
|
// Entry 80 - 9F
|
||||||
0x13000000, 0x13000080, 0x13000122, 0x13600000,
|
0x13000000, 0x13000081, 0x13000123, 0x13600000,
|
||||||
0x1360005d, 0x13600087, 0x13900000, 0x13900001,
|
0x1360005e, 0x13600088, 0x13900000, 0x13900001,
|
||||||
0x1390001a, 0x13900025, 0x13900026, 0x1390002d,
|
0x1390001a, 0x13900025, 0x13900026, 0x1390002d,
|
||||||
0x1390002e, 0x1390002f, 0x13900034, 0x13900036,
|
0x1390002e, 0x1390002f, 0x13900034, 0x13900036,
|
||||||
0x1390003a, 0x1390003d, 0x13900042, 0x13900046,
|
0x1390003a, 0x1390003d, 0x13900042, 0x13900046,
|
||||||
0x13900048, 0x13900049, 0x1390004a, 0x1390004e,
|
0x13900048, 0x13900049, 0x1390004a, 0x1390004e,
|
||||||
0x13900050, 0x13900052, 0x1390005c, 0x1390005d,
|
0x13900050, 0x13900052, 0x1390005d, 0x1390005e,
|
||||||
0x13900060, 0x13900061, 0x13900063, 0x13900064,
|
0x13900061, 0x13900062, 0x13900064, 0x13900065,
|
||||||
// Entry A0 - BF
|
// Entry A0 - BF
|
||||||
0x1390006d, 0x13900072, 0x13900073, 0x13900074,
|
0x1390006e, 0x13900073, 0x13900074, 0x13900075,
|
||||||
0x13900075, 0x1390007b, 0x1390007c, 0x1390007f,
|
0x13900076, 0x1390007c, 0x1390007d, 0x13900080,
|
||||||
0x13900080, 0x13900081, 0x13900083, 0x1390008a,
|
0x13900081, 0x13900082, 0x13900084, 0x1390008b,
|
||||||
0x1390008c, 0x1390008d, 0x13900096, 0x13900097,
|
0x1390008d, 0x1390008e, 0x13900097, 0x13900098,
|
||||||
0x13900098, 0x13900099, 0x1390009a, 0x1390009f,
|
0x13900099, 0x1390009a, 0x1390009b, 0x139000a0,
|
||||||
0x139000a0, 0x139000a4, 0x139000a7, 0x139000a9,
|
0x139000a1, 0x139000a5, 0x139000a8, 0x139000aa,
|
||||||
0x139000ad, 0x139000b1, 0x139000b4, 0x139000b5,
|
0x139000ae, 0x139000b2, 0x139000b5, 0x139000b6,
|
||||||
0x139000bf, 0x139000c0, 0x139000c6, 0x139000c7,
|
0x139000c0, 0x139000c1, 0x139000c7, 0x139000c8,
|
||||||
// Entry C0 - DF
|
// Entry C0 - DF
|
||||||
0x139000ca, 0x139000cb, 0x139000cc, 0x139000ce,
|
0x139000cb, 0x139000cc, 0x139000cd, 0x139000cf,
|
||||||
0x139000d0, 0x139000d2, 0x139000d5, 0x139000d6,
|
0x139000d1, 0x139000d3, 0x139000d6, 0x139000d7,
|
||||||
0x139000d9, 0x139000dd, 0x139000df, 0x139000e0,
|
0x139000da, 0x139000de, 0x139000e0, 0x139000e1,
|
||||||
0x139000e6, 0x139000e7, 0x139000e8, 0x139000eb,
|
0x139000e7, 0x139000e8, 0x139000e9, 0x139000ec,
|
||||||
0x139000ec, 0x139000f0, 0x13900107, 0x13900109,
|
0x139000ed, 0x139000f1, 0x13900108, 0x1390010a,
|
||||||
0x1390010a, 0x1390010b, 0x1390010c, 0x1390010d,
|
0x1390010b, 0x1390010c, 0x1390010d, 0x1390010e,
|
||||||
0x1390010e, 0x1390010f, 0x13900112, 0x13900117,
|
0x1390010f, 0x13900110, 0x13900113, 0x13900118,
|
||||||
0x1390011b, 0x1390011d, 0x1390011f, 0x13900125,
|
0x1390011c, 0x1390011e, 0x13900120, 0x13900126,
|
||||||
// Entry E0 - FF
|
// Entry E0 - FF
|
||||||
0x13900129, 0x1390012c, 0x1390012d, 0x1390012f,
|
0x1390012a, 0x1390012d, 0x1390012e, 0x13900130,
|
||||||
0x13900131, 0x13900133, 0x13900135, 0x13900139,
|
0x13900132, 0x13900134, 0x13900136, 0x1390013a,
|
||||||
0x1390013c, 0x1390013d, 0x1390013f, 0x13900142,
|
0x1390013d, 0x1390013e, 0x13900140, 0x13900143,
|
||||||
0x13900161, 0x13900162, 0x13900164, 0x13c00000,
|
0x13900162, 0x13900163, 0x13900165, 0x13c00000,
|
||||||
0x13c00001, 0x13e00000, 0x13e0001f, 0x13e0002c,
|
0x13c00001, 0x13e00000, 0x13e0001f, 0x13e0002c,
|
||||||
0x13e0003f, 0x13e00041, 0x13e00048, 0x13e00051,
|
0x13e0003f, 0x13e00041, 0x13e00048, 0x13e00051,
|
||||||
0x13e00054, 0x13e00056, 0x13e00059, 0x13e00065,
|
0x13e00054, 0x13e00057, 0x13e0005a, 0x13e00066,
|
||||||
0x13e00068, 0x13e00069, 0x13e0006e, 0x13e00086,
|
0x13e00069, 0x13e0006a, 0x13e0006f, 0x13e00087,
|
||||||
// Entry 100 - 11F
|
// Entry 100 - 11F
|
||||||
0x13e00089, 0x13e0008f, 0x13e00094, 0x13e000cf,
|
0x13e0008a, 0x13e00090, 0x13e00095, 0x13e000d0,
|
||||||
0x13e000d8, 0x13e000e2, 0x13e000e4, 0x13e000e7,
|
0x13e000d9, 0x13e000e3, 0x13e000e5, 0x13e000e8,
|
||||||
0x13e000ec, 0x13e000f1, 0x13e0011a, 0x13e00135,
|
0x13e000ed, 0x13e000f2, 0x13e0011b, 0x13e00136,
|
||||||
0x13e00136, 0x13e0013b, 0x14000000, 0x1400006a,
|
0x13e00137, 0x13e0013c, 0x14000000, 0x1400006b,
|
||||||
0x14500000, 0x1450006e, 0x14600000, 0x14600052,
|
0x14500000, 0x1450006f, 0x14600000, 0x14600052,
|
||||||
0x14800000, 0x14800024, 0x1480009c, 0x14e00000,
|
0x14800000, 0x14800024, 0x1480009d, 0x14e00000,
|
||||||
0x14e00052, 0x14e00084, 0x14e000c9, 0x14e00114,
|
0x14e00052, 0x14e00085, 0x14e000ca, 0x14e00115,
|
||||||
0x15100000, 0x15100072, 0x15300000, 0x153000e7,
|
0x15100000, 0x15100073, 0x15300000, 0x153000e8,
|
||||||
// Entry 120 - 13F
|
// Entry 120 - 13F
|
||||||
0x15800000, 0x15800063, 0x15800076, 0x15e00000,
|
0x15800000, 0x15800064, 0x15800077, 0x15e00000,
|
||||||
0x15e00036, 0x15e00037, 0x15e0003a, 0x15e0003b,
|
0x15e00036, 0x15e00037, 0x15e0003a, 0x15e0003b,
|
||||||
0x15e0003c, 0x15e00049, 0x15e0004b, 0x15e0004c,
|
0x15e0003c, 0x15e00049, 0x15e0004b, 0x15e0004c,
|
||||||
0x15e0004d, 0x15e0004e, 0x15e0004f, 0x15e00052,
|
0x15e0004d, 0x15e0004e, 0x15e0004f, 0x15e00052,
|
||||||
0x15e00062, 0x15e00067, 0x15e00078, 0x15e0007a,
|
0x15e00063, 0x15e00068, 0x15e00079, 0x15e0007b,
|
||||||
0x15e0007e, 0x15e00084, 0x15e00085, 0x15e00086,
|
0x15e0007f, 0x15e00085, 0x15e00086, 0x15e00087,
|
||||||
0x15e00091, 0x15e000a8, 0x15e000b7, 0x15e000ba,
|
0x15e00092, 0x15e000a9, 0x15e000b8, 0x15e000bb,
|
||||||
0x15e000bb, 0x15e000be, 0x15e000bf, 0x15e000c3,
|
0x15e000bc, 0x15e000bf, 0x15e000c0, 0x15e000c4,
|
||||||
// Entry 140 - 15F
|
// Entry 140 - 15F
|
||||||
0x15e000c8, 0x15e000c9, 0x15e000cc, 0x15e000d3,
|
0x15e000c9, 0x15e000ca, 0x15e000cd, 0x15e000d4,
|
||||||
0x15e000d4, 0x15e000e5, 0x15e000ea, 0x15e00102,
|
0x15e000d5, 0x15e000e6, 0x15e000eb, 0x15e00103,
|
||||||
0x15e00107, 0x15e0010a, 0x15e00114, 0x15e0011c,
|
0x15e00108, 0x15e0010b, 0x15e00115, 0x15e0011d,
|
||||||
0x15e00120, 0x15e00122, 0x15e00128, 0x15e0013f,
|
0x15e00121, 0x15e00123, 0x15e00129, 0x15e00140,
|
||||||
0x15e00140, 0x15e0015f, 0x16900000, 0x1690009e,
|
0x15e00141, 0x15e00160, 0x16900000, 0x1690009f,
|
||||||
0x16d00000, 0x16d000d9, 0x16e00000, 0x16e00096,
|
0x16d00000, 0x16d000da, 0x16e00000, 0x16e00097,
|
||||||
0x17e00000, 0x17e0007b, 0x19000000, 0x1900006e,
|
0x17e00000, 0x17e0007c, 0x19000000, 0x1900006f,
|
||||||
0x1a300000, 0x1a30004e, 0x1a300078, 0x1a3000b2,
|
0x1a300000, 0x1a30004e, 0x1a300079, 0x1a3000b3,
|
||||||
// Entry 160 - 17F
|
// Entry 160 - 17F
|
||||||
0x1a400000, 0x1a400099, 0x1a900000, 0x1ab00000,
|
0x1a400000, 0x1a40009a, 0x1a900000, 0x1ab00000,
|
||||||
0x1ab000a4, 0x1ac00000, 0x1ac00098, 0x1b400000,
|
0x1ab000a5, 0x1ac00000, 0x1ac00099, 0x1b400000,
|
||||||
0x1b400080, 0x1b4000d4, 0x1b4000d6, 0x1b800000,
|
0x1b400081, 0x1b4000d5, 0x1b4000d7, 0x1b800000,
|
||||||
0x1b800135, 0x1bc00000, 0x1bc00097, 0x1be00000,
|
0x1b800136, 0x1bc00000, 0x1bc00098, 0x1be00000,
|
||||||
0x1be00099, 0x1d100000, 0x1d100033, 0x1d100090,
|
0x1be0009a, 0x1d100000, 0x1d100033, 0x1d100091,
|
||||||
0x1d200000, 0x1d200060, 0x1d500000, 0x1d500092,
|
0x1d200000, 0x1d200061, 0x1d500000, 0x1d500093,
|
||||||
0x1d700000, 0x1d700028, 0x1e100000, 0x1e100095,
|
0x1d700000, 0x1d700028, 0x1e100000, 0x1e100096,
|
||||||
0x1e700000, 0x1e7000d6, 0x1ea00000, 0x1ea00053,
|
0x1e700000, 0x1e7000d7, 0x1ea00000, 0x1ea00053,
|
||||||
// Entry 180 - 19F
|
// Entry 180 - 19F
|
||||||
0x1f300000, 0x1f500000, 0x1f800000, 0x1f80009d,
|
0x1f300000, 0x1f500000, 0x1f800000, 0x1f80009e,
|
||||||
0x1f900000, 0x1f90004e, 0x1f90009e, 0x1f900113,
|
0x1f900000, 0x1f90004e, 0x1f90009f, 0x1f900114,
|
||||||
0x1f900138, 0x1fa00000, 0x1fb00000, 0x20000000,
|
0x1f900139, 0x1fa00000, 0x1fb00000, 0x20000000,
|
||||||
0x200000a2, 0x20300000, 0x20700000, 0x20700052,
|
0x200000a3, 0x20300000, 0x20700000, 0x20700052,
|
||||||
0x20800000, 0x20a00000, 0x20a0012f, 0x20e00000,
|
0x20800000, 0x20a00000, 0x20a00130, 0x20e00000,
|
||||||
0x20f00000, 0x21000000, 0x2100007d, 0x21200000,
|
0x20f00000, 0x21000000, 0x2100007e, 0x21200000,
|
||||||
0x21200067, 0x21600000, 0x21700000, 0x217000a4,
|
0x21200068, 0x21600000, 0x21700000, 0x217000a5,
|
||||||
0x21f00000, 0x22300000, 0x2230012f, 0x22700000,
|
0x21f00000, 0x22300000, 0x22300130, 0x22700000,
|
||||||
// Entry 1A0 - 1BF
|
// Entry 1A0 - 1BF
|
||||||
0x2270005a, 0x23400000, 0x234000c3, 0x23900000,
|
0x2270005b, 0x23400000, 0x234000c4, 0x23900000,
|
||||||
0x239000a4, 0x24200000, 0x242000ae, 0x24400000,
|
0x239000a5, 0x24200000, 0x242000af, 0x24400000,
|
||||||
0x24400052, 0x24500000, 0x24500082, 0x24600000,
|
0x24400052, 0x24500000, 0x24500083, 0x24600000,
|
||||||
0x246000a4, 0x24a00000, 0x24a000a6, 0x25100000,
|
0x246000a5, 0x24a00000, 0x24a000a7, 0x25100000,
|
||||||
0x25100099, 0x25400000, 0x254000aa, 0x254000ab,
|
0x2510009a, 0x25400000, 0x254000ab, 0x254000ac,
|
||||||
0x25600000, 0x25600099, 0x26a00000, 0x26a00099,
|
0x25600000, 0x2560009a, 0x26a00000, 0x26a0009a,
|
||||||
0x26b00000, 0x26b0012f, 0x26d00000, 0x26d00052,
|
0x26b00000, 0x26b00130, 0x26d00000, 0x26d00052,
|
||||||
0x26e00000, 0x26e00060, 0x27400000, 0x28100000,
|
0x26e00000, 0x26e00061, 0x27400000, 0x28100000,
|
||||||
// Entry 1C0 - 1DF
|
// Entry 1C0 - 1DF
|
||||||
0x2810007b, 0x28a00000, 0x28a000a5, 0x29100000,
|
0x2810007c, 0x28a00000, 0x28a000a6, 0x29100000,
|
||||||
0x2910012f, 0x29500000, 0x295000b7, 0x2a300000,
|
0x29100130, 0x29500000, 0x295000b8, 0x2a300000,
|
||||||
0x2a300131, 0x2af00000, 0x2af00135, 0x2b500000,
|
0x2a300132, 0x2af00000, 0x2af00136, 0x2b500000,
|
||||||
0x2b50002a, 0x2b50004b, 0x2b50004c, 0x2b50004d,
|
0x2b50002a, 0x2b50004b, 0x2b50004c, 0x2b50004d,
|
||||||
0x2b800000, 0x2b8000af, 0x2bf00000, 0x2bf0009b,
|
0x2b800000, 0x2b8000b0, 0x2bf00000, 0x2bf0009c,
|
||||||
0x2bf0009c, 0x2c000000, 0x2c0000b6, 0x2c200000,
|
0x2bf0009d, 0x2c000000, 0x2c0000b7, 0x2c200000,
|
||||||
0x2c20004b, 0x2c400000, 0x2c4000a4, 0x2c500000,
|
0x2c20004b, 0x2c400000, 0x2c4000a5, 0x2c500000,
|
||||||
0x2c5000a4, 0x2c700000, 0x2c7000b8, 0x2d100000,
|
0x2c5000a5, 0x2c700000, 0x2c7000b9, 0x2d100000,
|
||||||
// Entry 1E0 - 1FF
|
// Entry 1E0 - 1FF
|
||||||
0x2d1000a4, 0x2d10012f, 0x2e900000, 0x2e9000a4,
|
0x2d1000a5, 0x2d100130, 0x2e900000, 0x2e9000a5,
|
||||||
0x2ed00000, 0x2ed000cc, 0x2f100000, 0x2f1000bf,
|
0x2ed00000, 0x2ed000cd, 0x2f100000, 0x2f1000c0,
|
||||||
0x2f200000, 0x2f2000d1, 0x2f400000, 0x2f400052,
|
0x2f200000, 0x2f2000d2, 0x2f400000, 0x2f400052,
|
||||||
0x2ff00000, 0x2ff000c2, 0x30400000, 0x30400099,
|
0x2ff00000, 0x2ff000c3, 0x30400000, 0x3040009a,
|
||||||
0x30b00000, 0x30b000c5, 0x31000000, 0x31b00000,
|
0x30b00000, 0x30b000c6, 0x31000000, 0x31b00000,
|
||||||
0x31b00099, 0x31f00000, 0x31f0003e, 0x31f000d0,
|
0x31b0009a, 0x31f00000, 0x31f0003e, 0x31f000d1,
|
||||||
0x31f0010d, 0x32000000, 0x320000cb, 0x32500000,
|
0x31f0010e, 0x32000000, 0x320000cc, 0x32500000,
|
||||||
0x32500052, 0x33100000, 0x331000c4, 0x33a00000,
|
0x32500052, 0x33100000, 0x331000c5, 0x33a00000,
|
||||||
// Entry 200 - 21F
|
// Entry 200 - 21F
|
||||||
0x33a0009c, 0x34100000, 0x34500000, 0x345000d2,
|
0x33a0009d, 0x34100000, 0x34500000, 0x345000d3,
|
||||||
0x34700000, 0x347000da, 0x34700110, 0x34e00000,
|
0x34700000, 0x347000db, 0x34700111, 0x34e00000,
|
||||||
0x34e00164, 0x35000000, 0x35000060, 0x350000d9,
|
0x34e00165, 0x35000000, 0x35000061, 0x350000da,
|
||||||
0x35100000, 0x35100099, 0x351000db, 0x36700000,
|
0x35100000, 0x3510009a, 0x351000dc, 0x36700000,
|
||||||
0x36700030, 0x36700036, 0x36700040, 0x3670005b,
|
0x36700030, 0x36700036, 0x36700040, 0x3670005c,
|
||||||
0x367000d9, 0x36700116, 0x3670011b, 0x36800000,
|
0x367000da, 0x36700117, 0x3670011c, 0x36800000,
|
||||||
0x36800052, 0x36a00000, 0x36a000da, 0x36c00000,
|
0x36800052, 0x36a00000, 0x36a000db, 0x36c00000,
|
||||||
0x36c00052, 0x36f00000, 0x37500000, 0x37600000,
|
0x36c00052, 0x36f00000, 0x37500000, 0x37600000,
|
||||||
// Entry 220 - 23F
|
// Entry 220 - 23F
|
||||||
0x37a00000, 0x38000000, 0x38000117, 0x38700000,
|
0x37a00000, 0x38000000, 0x38000118, 0x38700000,
|
||||||
0x38900000, 0x38900131, 0x39000000, 0x3900006f,
|
0x38900000, 0x38900132, 0x39000000, 0x39000070,
|
||||||
0x390000a4, 0x39500000, 0x39500099, 0x39800000,
|
0x390000a5, 0x39500000, 0x3950009a, 0x39800000,
|
||||||
0x3980007d, 0x39800106, 0x39d00000, 0x39d05000,
|
0x3980007e, 0x39800107, 0x39d00000, 0x39d05000,
|
||||||
0x39d050e8, 0x39d36000, 0x39d36099, 0x3a100000,
|
0x39d050e9, 0x39d36000, 0x39d3609a, 0x3a100000,
|
||||||
0x3b300000, 0x3b3000e9, 0x3bd00000, 0x3bd00001,
|
0x3b300000, 0x3b3000ea, 0x3bd00000, 0x3bd00001,
|
||||||
0x3be00000, 0x3be00024, 0x3c000000, 0x3c00002a,
|
0x3be00000, 0x3be00024, 0x3c000000, 0x3c00002a,
|
||||||
0x3c000041, 0x3c00004e, 0x3c00005a, 0x3c000086,
|
0x3c000041, 0x3c00004e, 0x3c00005b, 0x3c000087,
|
||||||
// Entry 240 - 25F
|
// Entry 240 - 25F
|
||||||
0x3c00008b, 0x3c0000b7, 0x3c0000c6, 0x3c0000d1,
|
0x3c00008c, 0x3c0000b8, 0x3c0000c7, 0x3c0000d2,
|
||||||
0x3c0000ee, 0x3c000118, 0x3c000126, 0x3c400000,
|
0x3c0000ef, 0x3c000119, 0x3c000127, 0x3c400000,
|
||||||
0x3c40003f, 0x3c400069, 0x3c4000e4, 0x3d400000,
|
0x3c40003f, 0x3c40006a, 0x3c4000e5, 0x3d400000,
|
||||||
0x3d40004e, 0x3d900000, 0x3d90003a, 0x3dc00000,
|
0x3d40004e, 0x3d900000, 0x3d90003a, 0x3dc00000,
|
||||||
0x3dc000bc, 0x3dc00104, 0x3de00000, 0x3de0012f,
|
0x3dc000bd, 0x3dc00105, 0x3de00000, 0x3de00130,
|
||||||
0x3e200000, 0x3e200047, 0x3e2000a5, 0x3e2000ae,
|
0x3e200000, 0x3e200047, 0x3e2000a6, 0x3e2000af,
|
||||||
0x3e2000bc, 0x3e200106, 0x3e200130, 0x3e500000,
|
0x3e2000bd, 0x3e200107, 0x3e200131, 0x3e500000,
|
||||||
0x3e500107, 0x3e600000, 0x3e60012f, 0x3eb00000,
|
0x3e500108, 0x3e600000, 0x3e600130, 0x3eb00000,
|
||||||
// Entry 260 - 27F
|
// Entry 260 - 27F
|
||||||
0x3eb00106, 0x3ec00000, 0x3ec000a4, 0x3f300000,
|
0x3eb00107, 0x3ec00000, 0x3ec000a5, 0x3f300000,
|
||||||
0x3f30012f, 0x3fa00000, 0x3fa000e8, 0x3fc00000,
|
0x3f300130, 0x3fa00000, 0x3fa000e9, 0x3fc00000,
|
||||||
0x3fd00000, 0x3fd00072, 0x3fd000da, 0x3fd0010c,
|
0x3fd00000, 0x3fd00073, 0x3fd000db, 0x3fd0010d,
|
||||||
0x3ff00000, 0x3ff000d1, 0x40100000, 0x401000c3,
|
0x3ff00000, 0x3ff000d2, 0x40100000, 0x401000c4,
|
||||||
0x40200000, 0x4020004c, 0x40700000, 0x40800000,
|
0x40200000, 0x4020004c, 0x40700000, 0x40800000,
|
||||||
0x4085a000, 0x4085a0ba, 0x408e3000, 0x408e30ba,
|
0x4085b000, 0x4085b0bb, 0x408eb000, 0x408eb0bb,
|
||||||
0x40c00000, 0x40c000b3, 0x41200000, 0x41200111,
|
0x40c00000, 0x40c000b4, 0x41200000, 0x41200112,
|
||||||
0x41600000, 0x4160010f, 0x41c00000, 0x41d00000,
|
0x41600000, 0x41600110, 0x41c00000, 0x41d00000,
|
||||||
// Entry 280 - 29F
|
// Entry 280 - 29F
|
||||||
0x41e00000, 0x41f00000, 0x41f00072, 0x42200000,
|
0x41e00000, 0x41f00000, 0x41f00073, 0x42200000,
|
||||||
0x42300000, 0x42300164, 0x42900000, 0x42900062,
|
0x42300000, 0x42300165, 0x42900000, 0x42900063,
|
||||||
0x4290006f, 0x429000a4, 0x42900115, 0x43100000,
|
0x42900070, 0x429000a5, 0x42900116, 0x43100000,
|
||||||
0x43100027, 0x431000c2, 0x4310014d, 0x43200000,
|
0x43100027, 0x431000c3, 0x4310014e, 0x43200000,
|
||||||
0x43220000, 0x43220033, 0x432200bd, 0x43220105,
|
0x43220000, 0x43220033, 0x432200be, 0x43220106,
|
||||||
0x4322014d, 0x4325a000, 0x4325a033, 0x4325a0bd,
|
0x4322014e, 0x4325b000, 0x4325b033, 0x4325b0be,
|
||||||
0x4325a105, 0x4325a14d, 0x43700000, 0x43a00000,
|
0x4325b106, 0x4325b14e, 0x43700000, 0x43a00000,
|
||||||
0x43b00000, 0x44400000, 0x44400031, 0x44400072,
|
0x43b00000, 0x44400000, 0x44400031, 0x44400073,
|
||||||
// Entry 2A0 - 2BF
|
// Entry 2A0 - 2BF
|
||||||
0x4440010c, 0x44500000, 0x4450004b, 0x445000a4,
|
0x4440010d, 0x44500000, 0x4450004b, 0x445000a5,
|
||||||
0x4450012f, 0x44500131, 0x44e00000, 0x45000000,
|
0x44500130, 0x44500132, 0x44e00000, 0x45000000,
|
||||||
0x45000099, 0x450000b3, 0x450000d0, 0x4500010d,
|
0x4500009a, 0x450000b4, 0x450000d1, 0x4500010e,
|
||||||
0x46100000, 0x46100099, 0x46400000, 0x464000a4,
|
0x46100000, 0x4610009a, 0x46400000, 0x464000a5,
|
||||||
0x46400131, 0x46700000, 0x46700124, 0x46b00000,
|
0x46400132, 0x46700000, 0x46700125, 0x46b00000,
|
||||||
0x46b00123, 0x46f00000, 0x46f0006d, 0x46f0006f,
|
0x46b00124, 0x46f00000, 0x46f0006e, 0x46f00070,
|
||||||
0x47100000, 0x47600000, 0x47600127, 0x47a00000,
|
0x47100000, 0x47600000, 0x47600128, 0x47a00000,
|
||||||
0x48000000, 0x48200000, 0x48200129, 0x48a00000,
|
0x48000000, 0x48200000, 0x4820012a, 0x48a00000,
|
||||||
// Entry 2C0 - 2DF
|
// Entry 2C0 - 2DF
|
||||||
0x48a0005d, 0x48a0012b, 0x48e00000, 0x49400000,
|
0x48a0005e, 0x48a0012c, 0x48e00000, 0x49400000,
|
||||||
0x49400106, 0x4a400000, 0x4a4000d4, 0x4a900000,
|
0x49400107, 0x4a400000, 0x4a4000d5, 0x4a900000,
|
||||||
0x4a9000ba, 0x4ac00000, 0x4ac00053, 0x4ae00000,
|
0x4a9000bb, 0x4ac00000, 0x4ac00053, 0x4ae00000,
|
||||||
0x4ae00130, 0x4b400000, 0x4b400099, 0x4b4000e8,
|
0x4ae00131, 0x4b400000, 0x4b40009a, 0x4b4000e9,
|
||||||
0x4bc00000, 0x4bc05000, 0x4bc05024, 0x4bc20000,
|
0x4bc00000, 0x4bc05000, 0x4bc05024, 0x4bc20000,
|
||||||
0x4bc20137, 0x4bc5a000, 0x4bc5a137, 0x4be00000,
|
0x4bc20138, 0x4bc5b000, 0x4bc5b138, 0x4be00000,
|
||||||
0x4be5a000, 0x4be5a0b4, 0x4beeb000, 0x4beeb0b4,
|
0x4be5b000, 0x4be5b0b5, 0x4bef4000, 0x4bef40b5,
|
||||||
0x4c000000, 0x4c300000, 0x4c30013e, 0x4c900000,
|
0x4c000000, 0x4c300000, 0x4c30013f, 0x4c900000,
|
||||||
// Entry 2E0 - 2FF
|
// Entry 2E0 - 2FF
|
||||||
0x4c900001, 0x4cc00000, 0x4cc0012f, 0x4ce00000,
|
0x4c900001, 0x4cc00000, 0x4cc00130, 0x4ce00000,
|
||||||
0x4cf00000, 0x4cf0004e, 0x4e500000, 0x4e500114,
|
0x4cf00000, 0x4cf0004e, 0x4e500000, 0x4e500115,
|
||||||
0x4f200000, 0x4fb00000, 0x4fb00131, 0x50900000,
|
0x4f200000, 0x4fb00000, 0x4fb00132, 0x50900000,
|
||||||
0x50900052, 0x51200000, 0x51200001, 0x51800000,
|
0x50900052, 0x51200000, 0x51200001, 0x51800000,
|
||||||
0x5180003b, 0x518000d6, 0x51f00000, 0x51f3b000,
|
0x5180003b, 0x518000d7, 0x51f00000, 0x51f3b000,
|
||||||
0x51f3b053, 0x51f3c000, 0x51f3c08d, 0x52800000,
|
0x51f3b053, 0x51f3c000, 0x51f3c08e, 0x52800000,
|
||||||
0x528000ba, 0x52900000, 0x5293b000, 0x5293b053,
|
0x528000bb, 0x52900000, 0x5293b000, 0x5293b053,
|
||||||
0x5293b08d, 0x5293b0c6, 0x5293b10d, 0x5293c000,
|
0x5293b08e, 0x5293b0c7, 0x5293b10e, 0x5293c000,
|
||||||
// Entry 300 - 31F
|
// Entry 300 - 31F
|
||||||
0x5293c08d, 0x5293c0c6, 0x5293c12e, 0x52f00000,
|
0x5293c08e, 0x5293c0c7, 0x5293c12f, 0x52f00000,
|
||||||
0x52f00161,
|
0x52f00162,
|
||||||
 } // Size: 3116 bytes
 
 const specialTagsStr string = "ca-ES-valencia en-US-u-va-posix"
 
-// Total table size 3147 bytes (3KiB); checksum: BE816D44
+// Total table size 3147 bytes (3KiB); checksum: 5A8FFFA5
135 vendor/golang.org/x/text/internal/language/language.go (generated, vendored)
@@ -251,6 +251,13 @@ func (t Tag) Parent() Tag {
 
 // ParseExtension parses s as an extension and returns it on success.
 func ParseExtension(s string) (ext string, err error) {
+    defer func() {
+        if recover() != nil {
+            ext = ""
+            err = ErrSyntax
+        }
+    }()
+
     scan := makeScannerString(s)
     var end int
     if n := len(scan.token); n != 1 {
@@ -303,9 +310,17 @@ func (t Tag) Extensions() []string {
 // are of the allowed values defined for the Unicode locale extension ('u') in
 // https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
 // TypeForKey will traverse the inheritance chain to get the correct value.
+//
+// If there are multiple types associated with a key, only the first will be
+// returned. If there is no type associated with a key, it returns the empty
+// string.
 func (t Tag) TypeForKey(key string) string {
-    if start, end, _ := t.findTypeForKey(key); end != start {
-        return t.str[start:end]
+    if _, start, end, _ := t.findTypeForKey(key); end != start {
+        s := t.str[start:end]
+        if p := strings.IndexByte(s, '-'); p >= 0 {
+            s = s[:p]
+        }
+        return s
     }
     return ""
 }
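The documented TypeForKey behaviour is easiest to see through the public wrapper package. A minimal sketch, assuming only what the comment above states; the tag used here is an arbitrary example, not taken from the diff:

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	t := language.MustParse("de-u-co-phonebk")
	fmt.Println(t.TypeForKey("co"))       // should print "phonebk"
	fmt.Println(t.TypeForKey("nu") == "") // should print "true": no type set for this key
}
```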
@@ -329,13 +344,13 @@ func (t Tag) SetTypeForKey(key, value string) (Tag, error) {
 
     // Remove the setting if value is "".
     if value == "" {
-        start, end, _ := t.findTypeForKey(key)
-        if start != end {
-            // Remove key tag and leading '-'.
-            start -= 4
-
+        start, sep, end, _ := t.findTypeForKey(key)
+        if start != sep {
             // Remove a possible empty extension.
-            if (end == len(t.str) || t.str[end+2] == '-') && t.str[start-2] == '-' {
+            switch {
+            case t.str[start-2] != '-': // has previous elements.
+            case end == len(t.str), // end of string
+                end+2 < len(t.str) && t.str[end+2] == '-': // end of extension
                 start -= 2
             }
             if start == int(t.pVariant) && end == len(t.str) {
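For context on the removal path rewritten above: setting an empty value for a key is documented to drop the key, and an extension left empty is dropped with it. A small sketch against the public package, with the expected outputs hedged in comments:

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	t := language.MustParse("de-u-co-phonebk")

	// Replacing the type for an existing key.
	t2, err := t.SetTypeForKey("co", "trad")
	fmt.Println(t2, err) // expected: de-u-co-trad <nil>

	// An empty value removes the setting; the now-empty -u- extension goes with it.
	t3, err := t2.SetTypeForKey("co", "")
	fmt.Println(t3, err) // expected: de <nil>
}
```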
@@ -381,28 +396,28 @@ func (t Tag) SetTypeForKey(key, value string) (Tag, error) {
         t.str = string(buf[:uStart+len(b)])
     } else {
         s := t.str
-        start, end, hasExt := t.findTypeForKey(key)
-        if start == end {
+        start, sep, end, hasExt := t.findTypeForKey(key)
+        if start == sep {
             if hasExt {
                 b = b[2:]
             }
-            t.str = fmt.Sprintf("%s-%s%s", s[:start], b, s[end:])
+            t.str = fmt.Sprintf("%s-%s%s", s[:sep], b, s[end:])
         } else {
-            t.str = fmt.Sprintf("%s%s%s", s[:start], value, s[end:])
+            t.str = fmt.Sprintf("%s-%s%s", s[:start+3], value, s[end:])
         }
     }
     return t, nil
 }
 
-// findKeyAndType returns the start and end position for the type corresponding
+// findTypeForKey returns the start and end position for the type corresponding
 // to key or the point at which to insert the key-value pair if the type
 // wasn't found. The hasExt return value reports whether an -u extension was present.
 // Note: the extensions are typically very small and are likely to contain
 // only one key-type pair.
-func (t Tag) findTypeForKey(key string) (start, end int, hasExt bool) {
+func (t Tag) findTypeForKey(key string) (start, sep, end int, hasExt bool) {
     p := int(t.pExt)
     if len(key) != 2 || p == len(t.str) || p == 0 {
-        return p, p, false
+        return p, p, p, false
     }
     s := t.str
 
@@ -410,10 +425,10 @@ func (t Tag) findTypeForKey(key string) (start, end int, hasExt bool) {
     for p++; s[p] != 'u'; p++ {
         if s[p] > 'u' {
             p--
-            return p, p, false
+            return p, p, p, false
         }
         if p = nextExtension(s, p); p == len(s) {
-            return len(s), len(s), false
+            return len(s), len(s), len(s), false
         }
     }
     // Proceed to the hyphen following the extension name.
@@ -424,40 +439,28 @@ func (t Tag) findTypeForKey(key string) (start, end int, hasExt bool) {
 
     // Iterate over keys until we get the end of a section.
     for {
-        // p points to the hyphen preceding the current token.
-        if p3 := p + 3; s[p3] == '-' {
-            // Found a key.
-            // Check whether we just processed the key that was requested.
-            if curKey == key {
-                return start, p, true
+        end = p
+        for p++; p < len(s) && s[p] != '-'; p++ {
+        }
+        n := p - end - 1
+        if n <= 2 && curKey == key {
+            if sep < end {
+                sep++
             }
-            // Set to the next key and continue scanning type tokens.
-            curKey = s[p+1 : p3]
+            return start, sep, end, true
+        }
+        switch n {
+        case 0, // invalid string
+            1: // next extension
+            return end, end, end, true
+        case 2:
+            // next key
+            curKey = s[end+1 : p]
             if curKey > key {
-                return p, p, true
+                return end, end, end, true
             }
-            // Start of the type token sequence.
-            start = p + 4
-            // A type is at least 3 characters long.
-            p += 7 // 4 + 3
-        } else {
-            // Attribute or type, which is at least 3 characters long.
-            p += 4
-        }
-        // p points past the third character of a type or attribute.
-        max := p + 5 // maximum length of token plus hyphen.
-        if len(s) < max {
-            max = len(s)
-        }
-        for ; p < max && s[p] != '-'; p++ {
-        }
-        // Bail if we have exhausted all tokens or if the next token starts
-        // a new extension.
-        if p == len(s) || s[p+2] == '-' {
-            if curKey == key {
-                return start, p, true
-            }
-            return p, p, true
-        }
+            start = end
+            sep = p
         }
     }
 }
@@ -465,7 +468,14 @@ func (t Tag) findTypeForKey(key string) (start, end int, hasExt bool) {
 // ParseBase parses a 2- or 3-letter ISO 639 code.
 // It returns a ValueError if s is a well-formed but unknown language identifier
 // or another error if another error occurred.
-func ParseBase(s string) (Language, error) {
+func ParseBase(s string) (l Language, err error) {
+    defer func() {
+        if recover() != nil {
+            l = 0
+            err = ErrSyntax
+        }
+    }()
+
     if n := len(s); n < 2 || 3 < n {
         return 0, ErrSyntax
     }
@@ -476,7 +486,14 @@ func ParseBase(s string) (Language, error) {
 // ParseScript parses a 4-letter ISO 15924 code.
 // It returns a ValueError if s is a well-formed but unknown script identifier
 // or another error if another error occurred.
-func ParseScript(s string) (Script, error) {
+func ParseScript(s string) (scr Script, err error) {
+    defer func() {
+        if recover() != nil {
+            scr = 0
+            err = ErrSyntax
+        }
+    }()
+
     if len(s) != 4 {
         return 0, ErrSyntax
     }
@@ -493,7 +510,14 @@ func EncodeM49(r int) (Region, error) {
 // ParseRegion parses a 2- or 3-letter ISO 3166-1 or a UN M.49 code.
 // It returns a ValueError if s is a well-formed but unknown region identifier
 // or another error if another error occurred.
-func ParseRegion(s string) (Region, error) {
+func ParseRegion(s string) (r Region, err error) {
+    defer func() {
+        if recover() != nil {
+            r = 0
+            err = ErrSyntax
+        }
+    }()
+
     if n := len(s); n < 2 || 3 < n {
         return 0, ErrSyntax
     }
@@ -582,7 +606,14 @@ type Variant struct {
 
 // ParseVariant parses and returns a Variant. An error is returned if s is not
 // a valid variant.
-func ParseVariant(s string) (Variant, error) {
+func ParseVariant(s string) (v Variant, err error) {
+    defer func() {
+        if recover() != nil {
+            v = Variant{}
+            err = ErrSyntax
+        }
+    }()
+
     s = strings.ToLower(s)
     if id, ok := variantIndex[s]; ok {
         return Variant{id, s}, nil
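The parser changes in this file share one idiom: a deferred recover that converts any unexpected panic into a zero value plus ErrSyntax, so malformed input can no longer crash the caller. A self-contained sketch of that pattern; the function and error names here are illustrative, not from the library:

```go
package main

import (
	"errors"
	"fmt"
)

var errSyntax = errors.New("syntax error")

// parseToken demonstrates the defer/recover guard added throughout this diff:
// a panic inside the parser is turned into (zero value, errSyntax).
func parseToken(s string) (tok string, err error) {
	defer func() {
		if recover() != nil {
			tok = ""
			err = errSyntax
		}
	}()
	// Stand-in for parsing logic that may index past the end of malformed input.
	return s[:2], nil
}

func main() {
	_, err := parseToken("x") // too short: s[:2] panics, recovered as errSyntax
	fmt.Println(err)
}
```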
4 vendor/golang.org/x/text/internal/language/lookup.go (generated, vendored)
@@ -50,7 +50,7 @@ func (id Language) Canonicalize() (Language, AliasType) {
     return normLang(id)
 }
 
-// mapLang returns the mapped langID of id according to mapping m.
+// normLang returns the mapped langID of id according to mapping m.
 func normLang(id Language) (Language, AliasType) {
     k := sort.Search(len(AliasMap), func(i int) bool {
         return AliasMap[i].From >= uint16(id)
@@ -328,7 +328,7 @@ func (r Region) IsPrivateUse() bool {
     return r.typ()&iso3166UserAssigned != 0
 }
 
-type Script uint8
+type Script uint16
 
 // getScriptID returns the script id for string s. It assumes that s
 // is of the format [A-Z][a-z]{3}.
59 vendor/golang.org/x/text/internal/language/parse.go (generated, vendored)
@@ -138,7 +138,7 @@ func (s *scanner) resizeRange(oldStart, oldEnd, newSize int) {
         b = make([]byte, n)
         copy(b, s.b[:oldStart])
     } else {
-        b = s.b[:n:n]
+        b = s.b[:n]
     }
     copy(b[end:], s.b[oldEnd:])
     s.b = b
@@ -232,6 +232,13 @@ func Parse(s string) (t Tag, err error) {
     if s == "" {
         return Und, ErrSyntax
     }
+    defer func() {
+        if recover() != nil {
+            t = Und
+            err = ErrSyntax
+            return
+        }
+    }()
     if len(s) <= maxAltTaglen {
         b := [maxAltTaglen]byte{}
         for i, c := range s {
@@ -263,7 +270,7 @@ func parse(scan *scanner, s string) (t Tag, err error) {
     } else if n >= 4 {
         return Und, ErrSyntax
     } else { // the usual case
-        t, end = parseTag(scan)
+        t, end = parseTag(scan, true)
         if n := len(scan.token); n == 1 {
             t.pExt = uint16(end)
             end = parseExtensions(scan)
@@ -289,7 +296,8 @@ func parse(scan *scanner, s string) (t Tag, err error) {
 
 // parseTag parses language, script, region and variants.
 // It returns a Tag and the end position in the input that was parsed.
-func parseTag(scan *scanner) (t Tag, end int) {
+// If doNorm is true, then <lang>-<extlang> will be normalized to <extlang>.
+func parseTag(scan *scanner, doNorm bool) (t Tag, end int) {
     var e error
     // TODO: set an error if an unknown lang, script or region is encountered.
     t.LangID, e = getLangID(scan.token)
@@ -300,14 +308,17 @@ func parseTag(scan *scanner) (t Tag, end int) {
     for len(scan.token) == 3 && isAlpha(scan.token[0]) {
         // From http://tools.ietf.org/html/bcp47, <lang>-<extlang> tags are equivalent
         // to a tag of the form <extlang>.
-        lang, e := getLangID(scan.token)
-        if lang != 0 {
-            t.LangID = lang
-            copy(scan.b[langStart:], lang.String())
-            scan.b[langStart+3] = '-'
-            scan.start = langStart + 4
+        if doNorm {
+            lang, e := getLangID(scan.token)
+            if lang != 0 {
+                t.LangID = lang
+                langStr := lang.String()
+                copy(scan.b[langStart:], langStr)
+                scan.b[langStart+len(langStr)] = '-'
+                scan.start = langStart + len(langStr) + 1
+            }
+            scan.gobble(e)
         }
-        scan.gobble(e)
         end = scan.scan()
     }
     if len(scan.token) == 4 && isAlpha(scan.token[0]) {
@@ -483,7 +494,7 @@ func parseExtensions(scan *scanner) int {
 func parseExtension(scan *scanner) int {
     start, end := scan.start, scan.end
     switch scan.token[0] {
-    case 'u':
+    case 'u': // https://www.ietf.org/rfc/rfc6067.txt
         attrStart := end
         scan.scan()
         for last := []byte{}; len(scan.token) > 2; scan.scan() {
@@ -503,27 +514,29 @@ func parseExtension(scan *scanner) int {
             last = scan.token
             end = scan.end
         }
+        // Scan key-type sequences. A key is of length 2 and may be followed
+        // by 0 or more "type" subtags from 3 to the maximum of 8 letters.
         var last, key []byte
         for attrEnd := end; len(scan.token) == 2; last = key {
             key = scan.token
-            keyEnd := scan.end
-            end = scan.acceptMinSize(3)
+            end = scan.end
+            for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
+                end = scan.end
+            }
             // TODO: check key value validity
-            if keyEnd == end || bytes.Compare(key, last) != 1 {
+            if bytes.Compare(key, last) != 1 || scan.err != nil {
                 // We have an invalid key or the keys are not sorted.
                 // Start scanning keys from scratch and reorder.
                 p := attrEnd + 1
                 scan.next = p
                 keys := [][]byte{}
                 for scan.scan(); len(scan.token) == 2; {
-                    keyStart, keyEnd := scan.start, scan.end
-                    end = scan.acceptMinSize(3)
-                    if keyEnd != end {
-                        keys = append(keys, scan.b[keyStart:end])
-                    } else {
-                        scan.setError(ErrSyntax)
-                        end = keyStart
+                    keyStart := scan.start
+                    end = scan.end
+                    for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
+                        end = scan.end
                     }
+                    keys = append(keys, scan.b[keyStart:end])
                 }
                 sort.Stable(bytesSort{keys, 2})
                 if n := len(keys); n > 0 {
@@ -547,10 +560,10 @@ func parseExtension(scan *scanner) int {
                 break
             }
         }
-    case 't':
+    case 't': // https://www.ietf.org/rfc/rfc6497.txt
         scan.scan()
         if n := len(scan.token); n >= 2 && n <= 3 && isAlpha(scan.token[1]) {
-            _, end = parseTag(scan)
+            _, end = parseTag(scan, false)
             scan.toLower(start, end)
         }
         for len(scan.token) == 2 && !isAlpha(scan.token[1]) {
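The rewritten key-type scanner above still sorts out-of-order -u- extension keys into canonical order. A rough way to observe this through the public API; the exact printed form is an assumption about canonical output, not quoted from the diff:

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	// Keys "nu" and "co" are given out of alphabetical order on input.
	t, err := language.Parse("en-u-nu-latn-co-phonebk")
	fmt.Println(t, err) // expected: en-u-co-phonebk-nu-latn <nil>
}
```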
4778 vendor/golang.org/x/text/internal/language/tables.go (generated, vendored)
File diff suppressed because it is too large
2 vendor/golang.org/x/text/internal/utf8internal/utf8internal.go (generated, vendored)
@@ -74,7 +74,7 @@ type AcceptRange struct {
 
 // AcceptRanges is a slice of AcceptRange values. For a given byte sequence b
 //
-// AcceptRanges[First[b[0]]>>AcceptShift]
+//	AcceptRanges[First[b[0]]>>AcceptShift]
 //
 // will give the value of AcceptRange for the multi-byte UTF-8 sequence starting
 // at b[0].
44 vendor/golang.org/x/text/language/doc.go (generated, vendored)
@ -10,18 +10,17 @@
|
|||||||
// and provides the user with the best experience
|
// and provides the user with the best experience
|
||||||
// (see https://blog.golang.org/matchlang).
|
// (see https://blog.golang.org/matchlang).
|
||||||
//
|
//
|
||||||
//
|
// # Matching preferred against supported languages
|
||||||
// Matching preferred against supported languages
|
|
||||||
//
|
//
|
||||||
// A Matcher for an application that supports English, Australian English,
|
// A Matcher for an application that supports English, Australian English,
|
||||||
// Danish, and standard Mandarin can be created as follows:
|
// Danish, and standard Mandarin can be created as follows:
|
||||||
//
|
//
|
||||||
// var matcher = language.NewMatcher([]language.Tag{
|
// var matcher = language.NewMatcher([]language.Tag{
|
||||||
// language.English, // The first language is used as fallback.
|
// language.English, // The first language is used as fallback.
|
||||||
// language.MustParse("en-AU"),
|
// language.MustParse("en-AU"),
|
||||||
// language.Danish,
|
// language.Danish,
|
||||||
// language.Chinese,
|
// language.Chinese,
|
||||||
// })
|
// })
|
||||||
//
|
//
|
||||||
// This list of supported languages is typically implied by the languages for
|
// This list of supported languages is typically implied by the languages for
|
||||||
// which there exists translations of the user interface.
|
// which there exists translations of the user interface.
|
||||||
@ -30,14 +29,14 @@
|
|||||||
// language tags.
|
// language tags.
|
||||||
// The MatchString finds best matches for such strings:
|
// The MatchString finds best matches for such strings:
|
||||||
//
|
//
|
||||||
// handler(w http.ResponseWriter, r *http.Request) {
|
// handler(w http.ResponseWriter, r *http.Request) {
|
||||||
// lang, _ := r.Cookie("lang")
|
// lang, _ := r.Cookie("lang")
|
||||||
// accept := r.Header.Get("Accept-Language")
|
// accept := r.Header.Get("Accept-Language")
|
||||||
// tag, _ := language.MatchStrings(matcher, lang.String(), accept)
|
// tag, _ := language.MatchStrings(matcher, lang.String(), accept)
|
||||||
//
|
//
|
||||||
// // tag should now be used for the initialization of any
|
// // tag should now be used for the initialization of any
|
||||||
// // locale-specific service.
|
// // locale-specific service.
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// The Matcher's Match method can be used to match Tags directly.
|
// The Matcher's Match method can be used to match Tags directly.
|
||||||
//
|
//
|
||||||
@ -48,8 +47,7 @@
|
|||||||
// For instance, it will know that a reader of Bokmål Danish can read Norwegian
|
// For instance, it will know that a reader of Bokmål Danish can read Norwegian
|
||||||
// and will know that Cantonese ("yue") is a good match for "zh-HK".
|
// and will know that Cantonese ("yue") is a good match for "zh-HK".
|
||||||
//
|
//
|
||||||
//
|
// # Using match results
|
||||||
// Using match results
|
|
||||||
//
|
//
|
||||||
// To guarantee a consistent user experience to the user it is important to
|
// To guarantee a consistent user experience to the user it is important to
|
||||||
// use the same language tag for the selection of any locale-specific services.
|
// use the same language tag for the selection of any locale-specific services.
|
||||||
@ -58,9 +56,9 @@
|
|||||||
// More subtly confusing is using the wrong sorting order or casing
|
// More subtly confusing is using the wrong sorting order or casing
|
||||||
// algorithm for a certain language.
|
// algorithm for a certain language.
|
||||||
//
|
//
|
||||||
// All the packages in x/text that provide locale-specific services
|
// All the packages in x/text that provide locale-specific services
|
||||||
// (e.g. collate, cases) should be initialized with the tag that was
|
// (e.g. collate, cases) should be initialized with the tag that was
|
||||||
// obtained at the start of an interaction with the user.
|
// obtained at the start of an interaction with the user.
|
||||||
//
|
//
|
||||||
// Note that Tag that is returned by Match and MatchString may differ from any
|
// Note that Tag that is returned by Match and MatchString may differ from any
|
||||||
// of the supported languages, as it may contain carried over settings from
|
// of the supported languages, as it may contain carried over settings from
|
||||||
@ -70,8 +68,7 @@
|
|||||||
// Match and MatchString both return the index of the matched supported tag
|
// Match and MatchString both return the index of the matched supported tag
|
||||||
// to simplify associating such data with the matched tag.
|
// to simplify associating such data with the matched tag.
|
||||||
//
|
//
|
||||||
//
|
// # Canonicalization
|
||||||
// Canonicalization
|
|
||||||
//
|
//
|
||||||
// If one uses the Matcher to compare languages one does not need to
|
// If one uses the Matcher to compare languages one does not need to
|
||||||
// worry about canonicalization.
|
// worry about canonicalization.
|
||||||
@ -92,10 +89,9 @@
|
|||||||
// equivalence relations. The CanonType type can be used to alter the
|
// equivalence relations. The CanonType type can be used to alter the
|
||||||
// canonicalization form.
|
// canonicalization form.
|
||||||
//
|
//
|
||||||
// References
|
// # References
|
||||||
//
|
//
|
||||||
// BCP 47 - Tags for Identifying Languages http://tools.ietf.org/html/bcp47
|
// BCP 47 - Tags for Identifying Languages http://tools.ietf.org/html/bcp47
|
||||||
//
|
|
||||||
package language // import "golang.org/x/text/language"
|
package language // import "golang.org/x/text/language"
|
||||||
|
|
||||||
// TODO: explanation on how to match languages for your own locale-specific
|
// TODO: explanation on how to match languages for your own locale-specific
|
||||||
38 vendor/golang.org/x/text/language/go1_1.go (generated, vendored)
@ -1,38 +0,0 @@
|
|||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// +build !go1.2
|
|
||||||
|
|
||||||
package language
|
|
||||||
|
|
||||||
import "sort"
|
|
||||||
|
|
||||||
func sortStable(s sort.Interface) {
|
|
||||||
ss := stableSort{
|
|
||||||
s: s,
|
|
||||||
pos: make([]int, s.Len()),
|
|
||||||
}
|
|
||||||
for i := range ss.pos {
|
|
||||||
ss.pos[i] = i
|
|
||||||
}
|
|
||||||
sort.Sort(&ss)
|
|
||||||
}
|
|
||||||
|
|
||||||
type stableSort struct {
|
|
||||||
s sort.Interface
|
|
||||||
pos []int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stableSort) Len() int {
|
|
||||||
return len(s.pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stableSort) Less(i, j int) bool {
|
|
||||||
return s.s.Less(i, j) || !s.s.Less(j, i) && s.pos[i] < s.pos[j]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stableSort) Swap(i, j int) {
|
|
||||||
s.s.Swap(i, j)
|
|
||||||
s.pos[i], s.pos[j] = s.pos[j], s.pos[i]
|
|
||||||
}
|
|
11 vendor/golang.org/x/text/language/go1_2.go (generated, vendored)
@ -1,11 +0,0 @@
|
|||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// +build go1.2
|
|
||||||
|
|
||||||
package language
|
|
||||||
|
|
||||||
import "sort"
|
|
||||||
|
|
||||||
var sortStable = sort.Stable
|
|
6 vendor/golang.org/x/text/language/language.go (generated, vendored)
@@ -344,7 +344,7 @@ func (t Tag) Parent() Tag {
     return Tag(compact.Tag(t).Parent())
 }
 
-// returns token t and the rest of the string.
+// nextToken returns token t and the rest of the string.
 func nextToken(s string) (t, tail string) {
     p := strings.Index(s[1:], "-")
     if p == -1 {
@@ -412,6 +412,10 @@ func (t Tag) Extensions() []Extension {
 // are of the allowed values defined for the Unicode locale extension ('u') in
 // https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
 // TypeForKey will traverse the inheritance chain to get the correct value.
+//
+// If there are multiple types associated with a key, only the first will be
+// returned. If there is no type associated with a key, it returns the empty
+// string.
 func (t Tag) TypeForKey(key string) string {
     if !compact.Tag(t).MayHaveExtensions() {
         if key != "rg" && key != "va" {
4 vendor/golang.org/x/text/language/match.go (generated, vendored)
@@ -434,7 +434,7 @@ func newMatcher(supported []Tag, options []MatchOption) *matcher {
     // (their canonicalization simply substitutes a different language code, but
     // nothing else), the match confidence is Exact, otherwise it is High.
     for i, lm := range language.AliasMap {
-        // If deprecated codes match and there is no fiddling with the script or
+        // If deprecated codes match and there is no fiddling with the script
         // or region, we consider it an exact match.
         conf := Exact
         if language.AliasTypes[i] != language.Macro {
@@ -545,7 +545,7 @@ type bestMatch struct {
 // match as the preferred match.
 //
 // If pin is true and have and tag are a strong match, it will henceforth only
-// consider matches for this language. This corresponds to the nothing that most
+// consider matches for this language. This corresponds to the idea that most
 // users have a strong preference for the first defined language. A user can
 // still prefer a second language over a dialect of the preferred language by
 // explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
30 vendor/golang.org/x/text/language/parse.go (generated, vendored)
@@ -6,6 +6,7 @@ package language
 
 import (
     "errors"
+    "sort"
     "strconv"
     "strings"
 
@@ -43,6 +44,13 @@ func Parse(s string) (t Tag, err error) {
 // https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
 // The resulting tag is canonicalized using the canonicalization type c.
 func (c CanonType) Parse(s string) (t Tag, err error) {
+    defer func() {
+        if recover() != nil {
+            t = Tag{}
+            err = language.ErrSyntax
+        }
+    }()
+
     tt, err := language.Parse(s)
     if err != nil {
         return makeTag(tt), err
@@ -79,6 +87,13 @@ func Compose(part ...interface{}) (t Tag, err error) {
 // tag is returned after canonicalizing using CanonType c. If one or more errors
 // are encountered, one of the errors is returned.
 func (c CanonType) Compose(part ...interface{}) (t Tag, err error) {
+    defer func() {
+        if recover() != nil {
+            t = Tag{}
+            err = language.ErrSyntax
+        }
+    }()
+
     var b language.Builder
     if err = update(&b, part...); err != nil {
         return und, err
@@ -133,6 +148,7 @@ func update(b *language.Builder, part ...interface{}) (err error) {
 }
 
 var errInvalidWeight = errors.New("ParseAcceptLanguage: invalid weight")
+var errTagListTooLarge = errors.New("tag list exceeds max length")
 
 // ParseAcceptLanguage parses the contents of an Accept-Language header as
 // defined in http://www.ietf.org/rfc/rfc2616.txt and returns a list of Tags and
@@ -142,6 +158,18 @@ var errInvalidWeight = errors.New("ParseAcceptLanguage: invalid weight")
 // Tags with a weight of zero will be dropped. An error will be returned if the
 // input could not be parsed.
 func ParseAcceptLanguage(s string) (tag []Tag, q []float32, err error) {
+    defer func() {
+        if recover() != nil {
+            tag = nil
+            q = nil
+            err = language.ErrSyntax
+        }
+    }()
+
+    if strings.Count(s, "-") > 1000 {
+        return nil, nil, errTagListTooLarge
+    }
+
     var entry string
     for s != "" {
         if entry, s = split(s, ','); entry == "" {
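Besides the recover guard, the hunk above adds a hard limit: an Accept-Language value with more than 1000 hyphens is now rejected with errTagListTooLarge before parsing. Ordinary headers are unaffected, as in this small sketch using the public API:

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	tags, q, err := language.ParseAcceptLanguage("da, en-GB;q=0.8, en;q=0.7")
	fmt.Println(tags, q, err) // expected: [da en-GB en] [1 0.8 0.7] <nil>
}
```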
@@ -179,7 +207,7 @@ func ParseAcceptLanguage(s string) (tag []Tag, q []float32, err error) {
         tag = append(tag, t)
         q = append(q, float32(w))
     }
-    sortStable(&tagSort{tag, q})
+    sort.Stable(&tagSort{tag, q})
     return tag, q, nil
 }
 
146 vendor/golang.org/x/text/language/tables.go (generated, vendored)
@ -23,31 +23,31 @@ const (
|
|||||||
_419 = 31
|
_419 = 31
|
||||||
_BR = 65
|
_BR = 65
|
||||||
_CA = 73
|
_CA = 73
|
||||||
_ES = 110
|
_ES = 111
|
||||||
_GB = 123
|
_GB = 124
|
||||||
_MD = 188
|
_MD = 189
|
||||||
_PT = 238
|
_PT = 239
|
||||||
_UK = 306
|
_UK = 307
|
||||||
_US = 309
|
_US = 310
|
||||||
_ZZ = 357
|
_ZZ = 358
|
||||||
_XA = 323
|
_XA = 324
|
||||||
_XC = 325
|
_XC = 326
|
||||||
_XK = 333
|
_XK = 334
|
||||||
)
|
)
|
||||||
const (
|
const (
|
||||||
_Latn = 90
|
_Latn = 91
|
||||||
_Hani = 57
|
_Hani = 57
|
||||||
_Hans = 59
|
_Hans = 59
|
||||||
_Hant = 60
|
_Hant = 60
|
||||||
_Qaaa = 143
|
_Qaaa = 149
|
||||||
_Qaai = 151
|
_Qaai = 157
|
||||||
_Qabx = 192
|
_Qabx = 198
|
||||||
_Zinh = 245
|
_Zinh = 255
|
||||||
_Zyyy = 250
|
_Zyyy = 260
|
||||||
_Zzzz = 251
|
_Zzzz = 261
|
||||||
)
|
)
|
||||||
|
|
||||||
var regionToGroups = []uint8{ // 357 elements
|
var regionToGroups = []uint8{ // 359 elements
|
||||||
// Entry 0 - 3F
|
// Entry 0 - 3F
|
||||||
0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x04,
|
0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x04,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x04, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x04, 0x00,
|
||||||
@ -60,51 +60,51 @@ var regionToGroups = []uint8{ // 357 elements
|
|||||||
// Entry 40 - 7F
|
// Entry 40 - 7F
|
||||||
0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x04, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00,
|
0x00, 0x04, 0x00, 0x00, 0x04, 0x00, 0x00, 0x04,
|
||||||
0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x08,
|
|
||||||
0x00, 0x04, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00,
|
|
||||||
// Entry 80 - BF
|
|
||||||
0x00, 0x00, 0x04, 0x00, 0x00, 0x04, 0x00, 0x00,
|
|
||||||
0x00, 0x04, 0x01, 0x00, 0x04, 0x02, 0x00, 0x04,
|
|
||||||
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
|
|
||||||
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x08, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
|
|
||||||
// Entry C0 - FF
|
|
||||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01,
|
|
||||||
0x04, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00, 0x04,
|
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
|
||||||
0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x04, 0x00, 0x05, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x00,
|
||||||
|
0x08, 0x00, 0x04, 0x00, 0x00, 0x08, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04,
|
||||||
|
// Entry 80 - BF
|
||||||
|
0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x04, 0x00,
|
||||||
|
0x00, 0x00, 0x04, 0x01, 0x00, 0x04, 0x02, 0x00,
|
||||||
|
0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
|
||||||
|
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x08, 0x08, 0x00, 0x00, 0x00, 0x04,
|
||||||
|
// Entry C0 - FF
|
||||||
|
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
|
||||||
|
0x01, 0x04, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x04, 0x00, 0x05, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
// Entry 100 - 13F
|
// Entry 100 - 13F
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
|
||||||
0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x00, 0x04,
|
0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
|
0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x04, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x04,
|
||||||
0x00, 0x04, 0x00, 0x04, 0x04, 0x05, 0x00, 0x00,
|
0x00, 0x00, 0x04, 0x00, 0x04, 0x04, 0x05, 0x00,
|
||||||
// Entry 140 - 17F
|
// Entry 140 - 17F
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
0x00, 0x00, 0x00, 0x00, 0x00,
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
-} // Size: 381 bytes
+} // Size: 383 bytes
 
 var paradigmLocales = [][3]uint16{ // 3 elements
-	0: [3]uint16{0x139, 0x0, 0x7b},
+	0: [3]uint16{0x139, 0x0, 0x7c},
 	1: [3]uint16{0x13e, 0x0, 0x1f},
-	2: [3]uint16{0x3c0, 0x41, 0xee},
+	2: [3]uint16{0x3c0, 0x41, 0xef},
 } // Size: 42 bytes
 
 type mutualIntelligibility struct {
@ -249,30 +249,30 @@ var matchLang = []mutualIntelligibility{ // 113 elements
|
|||||||
// matchScript holds pairs of scriptIDs where readers of one script
|
// matchScript holds pairs of scriptIDs where readers of one script
|
||||||
// can typically also read the other. Each is associated with a confidence.
|
// can typically also read the other. Each is associated with a confidence.
|
||||||
var matchScript = []scriptIntelligibility{ // 26 elements
|
var matchScript = []scriptIntelligibility{ // 26 elements
|
||||||
0: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x5a, haveScript: 0x20, distance: 0x5},
|
0: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x5b, haveScript: 0x20, distance: 0x5},
|
||||||
1: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x20, haveScript: 0x5a, distance: 0x5},
|
1: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x20, haveScript: 0x5b, distance: 0x5},
|
||||||
2: {wantLang: 0x58, haveLang: 0x3e2, wantScript: 0x5a, haveScript: 0x20, distance: 0xa},
|
2: {wantLang: 0x58, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
|
||||||
3: {wantLang: 0xa5, haveLang: 0x139, wantScript: 0xe, haveScript: 0x5a, distance: 0xa},
|
3: {wantLang: 0xa5, haveLang: 0x139, wantScript: 0xe, haveScript: 0x5b, distance: 0xa},
|
||||||
4: {wantLang: 0x1d7, haveLang: 0x3e2, wantScript: 0x8, haveScript: 0x20, distance: 0xa},
|
4: {wantLang: 0x1d7, haveLang: 0x3e2, wantScript: 0x8, haveScript: 0x20, distance: 0xa},
|
||||||
5: {wantLang: 0x210, haveLang: 0x139, wantScript: 0x2e, haveScript: 0x5a, distance: 0xa},
|
5: {wantLang: 0x210, haveLang: 0x139, wantScript: 0x2e, haveScript: 0x5b, distance: 0xa},
|
||||||
6: {wantLang: 0x24a, haveLang: 0x139, wantScript: 0x4e, haveScript: 0x5a, distance: 0xa},
|
6: {wantLang: 0x24a, haveLang: 0x139, wantScript: 0x4f, haveScript: 0x5b, distance: 0xa},
|
||||||
7: {wantLang: 0x251, haveLang: 0x139, wantScript: 0x52, haveScript: 0x5a, distance: 0xa},
|
7: {wantLang: 0x251, haveLang: 0x139, wantScript: 0x53, haveScript: 0x5b, distance: 0xa},
|
||||||
8: {wantLang: 0x2b8, haveLang: 0x139, wantScript: 0x57, haveScript: 0x5a, distance: 0xa},
|
8: {wantLang: 0x2b8, haveLang: 0x139, wantScript: 0x58, haveScript: 0x5b, distance: 0xa},
|
||||||
9: {wantLang: 0x304, haveLang: 0x139, wantScript: 0x6e, haveScript: 0x5a, distance: 0xa},
|
9: {wantLang: 0x304, haveLang: 0x139, wantScript: 0x6f, haveScript: 0x5b, distance: 0xa},
|
||||||
10: {wantLang: 0x331, haveLang: 0x139, wantScript: 0x75, haveScript: 0x5a, distance: 0xa},
|
10: {wantLang: 0x331, haveLang: 0x139, wantScript: 0x76, haveScript: 0x5b, distance: 0xa},
|
||||||
11: {wantLang: 0x351, haveLang: 0x139, wantScript: 0x22, haveScript: 0x5a, distance: 0xa},
|
11: {wantLang: 0x351, haveLang: 0x139, wantScript: 0x22, haveScript: 0x5b, distance: 0xa},
|
||||||
12: {wantLang: 0x395, haveLang: 0x139, wantScript: 0x81, haveScript: 0x5a, distance: 0xa},
|
12: {wantLang: 0x395, haveLang: 0x139, wantScript: 0x83, haveScript: 0x5b, distance: 0xa},
|
||||||
13: {wantLang: 0x39d, haveLang: 0x139, wantScript: 0x36, haveScript: 0x5a, distance: 0xa},
|
13: {wantLang: 0x39d, haveLang: 0x139, wantScript: 0x36, haveScript: 0x5b, distance: 0xa},
|
||||||
14: {wantLang: 0x3be, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
14: {wantLang: 0x3be, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
|
||||||
15: {wantLang: 0x3fa, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
15: {wantLang: 0x3fa, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
|
||||||
16: {wantLang: 0x40c, haveLang: 0x139, wantScript: 0xcf, haveScript: 0x5a, distance: 0xa},
|
16: {wantLang: 0x40c, haveLang: 0x139, wantScript: 0xd6, haveScript: 0x5b, distance: 0xa},
|
||||||
17: {wantLang: 0x450, haveLang: 0x139, wantScript: 0xde, haveScript: 0x5a, distance: 0xa},
|
17: {wantLang: 0x450, haveLang: 0x139, wantScript: 0xe6, haveScript: 0x5b, distance: 0xa},
|
||||||
18: {wantLang: 0x461, haveLang: 0x139, wantScript: 0xe1, haveScript: 0x5a, distance: 0xa},
|
18: {wantLang: 0x461, haveLang: 0x139, wantScript: 0xe9, haveScript: 0x5b, distance: 0xa},
|
||||||
19: {wantLang: 0x46f, haveLang: 0x139, wantScript: 0x2c, haveScript: 0x5a, distance: 0xa},
|
19: {wantLang: 0x46f, haveLang: 0x139, wantScript: 0x2c, haveScript: 0x5b, distance: 0xa},
|
||||||
20: {wantLang: 0x476, haveLang: 0x3e2, wantScript: 0x5a, haveScript: 0x20, distance: 0xa},
|
20: {wantLang: 0x476, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
|
||||||
21: {wantLang: 0x4b4, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
21: {wantLang: 0x4b4, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
|
||||||
22: {wantLang: 0x4bc, haveLang: 0x3e2, wantScript: 0x5a, haveScript: 0x20, distance: 0xa},
|
22: {wantLang: 0x4bc, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
|
||||||
23: {wantLang: 0x512, haveLang: 0x139, wantScript: 0x3e, haveScript: 0x5a, distance: 0xa},
|
23: {wantLang: 0x512, haveLang: 0x139, wantScript: 0x3e, haveScript: 0x5b, distance: 0xa},
|
||||||
24: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3b, haveScript: 0x3c, distance: 0xf},
|
24: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3b, haveScript: 0x3c, distance: 0xf},
|
||||||
25: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3c, haveScript: 0x3b, distance: 0x13},
|
25: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3c, haveScript: 0x3b, distance: 0x13},
|
||||||
} // Size: 232 bytes
|
} // Size: 232 bytes
|
||||||
@@ -295,4 +295,4 @@ var matchRegion = []regionIntelligibility{ // 15 elements
 	14: {lang: 0x529, script: 0x3c, group: 0x80, distance: 0x5},
 } // Size: 114 bytes
 
-// Total table size 1471 bytes (1KiB); checksum: 4CB1CD46
+// Total table size 1473 bytes (1KiB); checksum: 7BB90B5C
2 vendor/golang.org/x/text/runes/runes.go (generated, vendored)
@@ -33,7 +33,7 @@ func In(rt *unicode.RangeTable) Set {
     return setFunc(func(r rune) bool { return unicode.Is(rt, r) })
 }
 
-// In creates a Set with a Contains method that returns true for all runes not
+// NotIn creates a Set with a Contains method that returns true for all runes not
 // in the given RangeTable.
 func NotIn(rt *unicode.RangeTable) Set {
     return setFunc(func(r rune) bool { return !unicode.Is(rt, r) })
12 vendor/gopkg.in/ini.v1/.editorconfig (generated, vendored, new file)
@@ -0,0 +1,12 @@
+# http://editorconfig.org
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*_test.go]
+trim_trailing_whitespace = false
1 vendor/gopkg.in/ini.v1/.gitignore (generated, vendored)
@@ -4,3 +4,4 @@ ini.sublime-workspace
 testdata/conf_reflect.ini
 .idea
 /.vscode
+.DS_Store
27 vendor/gopkg.in/ini.v1/.golangci.yml (generated, vendored, new file)
@@ -0,0 +1,27 @@
+linters-settings:
+  staticcheck:
+    checks: [
+      "all",
+      "-SA1019" # There are valid use cases of strings.Title
+    ]
+  nakedret:
+    max-func-lines: 0 # Disallow any unnamed return statement
+
+linters:
+  enable:
+    - deadcode
+    - errcheck
+    - gosimple
+    - govet
+    - ineffassign
+    - staticcheck
+    - structcheck
+    - typecheck
+    - unused
+    - varcheck
+    - nakedret
+    - gofmt
+    - rowserrcheck
+    - unconvert
+    - goimports
+    - unparam
4 vendor/gopkg.in/ini.v1/README.md (generated, vendored)
@@ -1,6 +1,6 @@
 # INI
 
-[](https://github.com/go-ini/ini/actions?query=workflow%3AGo)
+[](https://github.com/go-ini/ini/actions?query=branch%3Amain)
 [](https://codecov.io/gh/go-ini/ini)
 [](https://pkg.go.dev/github.com/go-ini/ini?tab=doc)
 [](https://sourcegraph.com/github.com/go-ini/ini)
@@ -24,7 +24,7 @@ Package ini provides INI file read and write functionality in Go.
 
 ## Installation
 
-The minimum requirement of Go is **1.6**.
+The minimum requirement of Go is **1.13**.
 
 ```sh
 $ go get gopkg.in/ini.v1
9 vendor/gopkg.in/ini.v1/codecov.yml (generated, vendored)
@@ -4,6 +4,13 @@ coverage:
   project:
     default:
       threshold: 1%
+      informational: true
+  patch:
+    defualt:
+      only_pulls: true
+      informational: true
 
 comment:
-  layout: 'diff, files'
+  layout: 'diff'
+
+github_checks: false
5 vendor/gopkg.in/ini.v1/deprecated.go (generated, vendored)
@@ -14,12 +14,9 @@
 
 package ini
 
-const (
+var (
     // Deprecated: Use "DefaultSection" instead.
     DEFAULT_SECTION = DefaultSection
-)
-
-var (
     // Deprecated: AllCapsUnderscore converts to format ALL_CAPS_UNDERSCORE.
     AllCapsUnderscore = SnackCase
 )
15 vendor/gopkg.in/ini.v1/error.go (generated, vendored)
@@ -32,3 +32,18 @@ func IsErrDelimiterNotFound(err error) bool {
 func (err ErrDelimiterNotFound) Error() string {
     return fmt.Sprintf("key-value delimiter not found: %s", err.Line)
 }
+
+// ErrEmptyKeyName indicates the error type of no key name is found which there should be one.
+type ErrEmptyKeyName struct {
+    Line string
+}
+
+// IsErrEmptyKeyName returns true if the given error is an instance of ErrEmptyKeyName.
+func IsErrEmptyKeyName(err error) bool {
+    _, ok := err.(ErrEmptyKeyName)
+    return ok
+}
+
+func (err ErrEmptyKeyName) Error() string {
+    return fmt.Sprintf("empty key name: %s", err.Line)
+}
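A hedged sketch of how a caller might check for the new error type. Whether a given input actually produces ErrEmptyKeyName depends on parser changes outside this excerpt; the "= value" line below is only an assumed trigger:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	// Assumption: a delimiter with no key name is the kind of line this error reports.
	_, err := ini.Load([]byte("= value"))
	if err != nil && ini.IsErrEmptyKeyName(err) {
		fmt.Println("empty key name rejected:", err)
	}
}
```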
44 vendor/gopkg.in/ini.v1/file.go (generated, vendored)
@@ -142,6 +142,12 @@ func (f *File) GetSection(name string) (*Section, error) {
     return secs[0], err
 }
 
+// HasSection returns true if the file contains a section with given name.
+func (f *File) HasSection(name string) bool {
+    section, _ := f.GetSection(name)
+    return section != nil
+}
+
 // SectionsByName returns all sections with given name.
 func (f *File) SectionsByName(name string) ([]*Section, error) {
     if len(name) == 0 {
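The HasSection helper added above is a thin existence check on top of GetSection. A minimal usage sketch:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	f, err := ini.Load([]byte("[server]\nhost = 127.0.0.1\n"))
	if err != nil {
		panic(err)
	}
	fmt.Println(f.HasSection("server")) // true
	fmt.Println(f.HasSection("client")) // false
}
```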
@@ -168,8 +174,9 @@ func (f *File) SectionsByName(name string) ([]*Section, error) {
 func (f *File) Section(name string) *Section {
     sec, err := f.GetSection(name)
     if err != nil {
-        // Note: It's OK here because the only possible error is empty section name,
-        // but if it's empty, this piece of code won't be executed.
+        if name == "" {
+            name = DefaultSection
+        }
         sec, _ = f.NewSection(name)
         return sec
     }
@ -335,6 +342,7 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
|
|||||||
|
|
||||||
// Use buffer to make sure target is safe until finish encoding.
|
// Use buffer to make sure target is safe until finish encoding.
|
||||||
buf := bytes.NewBuffer(nil)
|
buf := bytes.NewBuffer(nil)
|
||||||
|
lastSectionIdx := len(f.sectionList) - 1
|
||||||
for i, sname := range f.sectionList {
|
for i, sname := range f.sectionList {
|
||||||
sec := f.SectionWithIndex(sname, f.sectionIndexes[i])
|
sec := f.SectionWithIndex(sname, f.sectionIndexes[i])
|
||||||
if len(sec.Comment) > 0 {
|
if len(sec.Comment) > 0 {
|
||||||
@ -364,12 +372,13 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
isLastSection := i == lastSectionIdx
|
||||||
if sec.isRawSection {
|
if sec.isRawSection {
|
||||||
if _, err := buf.WriteString(sec.rawBody); err != nil {
|
if _, err := buf.WriteString(sec.rawBody); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if PrettySection {
|
if PrettySection && !isLastSection {
|
||||||
// Put a line between sections
|
// Put a line between sections
|
||||||
if _, err := buf.WriteString(LineBreak); err != nil {
|
if _, err := buf.WriteString(LineBreak); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -435,16 +444,14 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
|
|||||||
kname = `"""` + kname + `"""`
|
kname = `"""` + kname + `"""`
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, val := range key.ValueWithShadows() {
|
writeKeyValue := func(val string) (bool, error) {
|
||||||
if _, err := buf.WriteString(kname); err != nil {
|
if _, err := buf.WriteString(kname); err != nil {
|
||||||
return nil, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if key.isBooleanType {
|
if key.isBooleanType {
|
||||||
if kname != sec.keyList[len(sec.keyList)-1] {
|
buf.WriteString(LineBreak)
|
||||||
buf.WriteString(LineBreak)
|
return true, nil
|
||||||
}
|
|
||||||
continue KeyList
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write out alignment spaces before "=" sign
|
// Write out alignment spaces before "=" sign
|
||||||
@ -461,10 +468,27 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
|
|||||||
val = `"` + val + `"`
|
val = `"` + val + `"`
|
||||||
}
|
}
|
||||||
if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil {
|
if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
shadows := key.ValueWithShadows()
|
||||||
|
if len(shadows) == 0 {
|
||||||
|
if _, err := writeKeyValue(""); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for _, val := range shadows {
|
||||||
|
exitLoop, err := writeKeyValue(val)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
} else if exitLoop {
|
||||||
|
continue KeyList
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for _, val := range key.nestedValues {
|
for _, val := range key.nestedValues {
|
||||||
if _, err := buf.WriteString(indent + " " + val + LineBreak); err != nil {
|
if _, err := buf.WriteString(indent + " " + val + LineBreak); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -472,7 +496,7 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if PrettySection {
|
if PrettySection && !isLastSection {
|
||||||
// Put a line between sections
|
// Put a line between sections
|
||||||
if _, err := buf.WriteString(LineBreak); err != nil {
|
if _, err := buf.WriteString(LineBreak); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
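
The `HasSection` helper added above checks for existence without creating the section as a side effect, unlike `Section(name)`, which creates it on demand. A minimal usage sketch (the INI content is invented):

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	cfg, err := ini.Load([]byte("[server]\naddr = 127.0.0.1\n"))
	if err != nil {
		panic(err)
	}

	// Reports existence only; no section is created by the check itself.
	fmt.Println(cfg.HasSection("server")) // true
	fmt.Println(cfg.HasSection("client")) // false
}
```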

vendor/gopkg.in/ini.v1/ini.go (generated, vendored), 12 lines changed

```diff
@@ -1,5 +1,3 @@
-// +build go1.6
-
 // Copyright 2014 Unknwon
 //
 // Licensed under the Apache License, Version 2.0 (the "License"): you may
@@ -25,15 +23,15 @@ import (
 )
 
 const (
-	// DefaultSection is the name of default section. You can use this constant or the string literal.
-	// In most of cases, an empty string is all you need to access the section.
-	DefaultSection = "DEFAULT"
-
 	// Maximum allowed depth when recursively substituing variable names.
 	depthValues = 99
 )
 
 var (
+	// DefaultSection is the name of default section. You can use this var or the string literal.
+	// In most of cases, an empty string is all you need to access the section.
+	DefaultSection = "DEFAULT"
+
 	// LineBreak is the delimiter to determine or compose a new line.
 	// This variable will be changed to "\r\n" automatically on Windows at package init time.
 	LineBreak = "\n"
@@ -125,6 +123,8 @@ type LoadOptions struct {
 	ReaderBufferSize int
 	// AllowNonUniqueSections indicates whether to allow sections with the same name multiple times.
 	AllowNonUniqueSections bool
+	// AllowDuplicateShadowValues indicates whether values for shadowed keys should be deduplicated.
+	AllowDuplicateShadowValues bool
 }
 
 // DebugFunc is the type of function called to log parse events.
```
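
`DefaultSection` is now a package-level `var` rather than a `const`, and `LoadOptions` gains `AllowDuplicateShadowValues`. A small sketch of the default-section behaviour, with invented content:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	// Keys outside any [section] header land in the default section.
	cfg, err := ini.Load([]byte("mode = production\n"))
	if err != nil {
		panic(err)
	}

	// An empty name and ini.DefaultSection address the same section.
	fmt.Println(cfg.Section("").Key("mode").String())                 // "production"
	fmt.Println(cfg.Section(ini.DefaultSection).Key("mode").String()) // "production"
}
```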

vendor/gopkg.in/ini.v1/key.go (generated, vendored), 36 lines changed

```diff
@@ -54,14 +54,16 @@ func (k *Key) addShadow(val string) error {
 		return errors.New("cannot add shadow to auto-increment or boolean key")
 	}
 
-	// Deduplicate shadows based on their values.
-	if k.value == val {
-		return nil
-	}
-	for i := range k.shadows {
-		if k.shadows[i].value == val {
+	if !k.s.f.options.AllowDuplicateShadowValues {
+		// Deduplicate shadows based on their values.
+		if k.value == val {
 			return nil
 		}
+		for i := range k.shadows {
+			if k.shadows[i].value == val {
+				return nil
+			}
+		}
 	}
 
 	shadow := newKey(k.s, k.name, val)
@@ -108,15 +110,24 @@ func (k *Key) Value() string {
 	return k.value
 }
 
-// ValueWithShadows returns raw values of key and its shadows if any.
+// ValueWithShadows returns raw values of key and its shadows if any. Shadow
+// keys with empty values are ignored from the returned list.
 func (k *Key) ValueWithShadows() []string {
 	if len(k.shadows) == 0 {
+		if k.value == "" {
+			return []string{}
+		}
 		return []string{k.value}
 	}
-	vals := make([]string, len(k.shadows)+1)
-	vals[0] = k.value
-	for i := range k.shadows {
-		vals[i+1] = k.shadows[i].value
+
+	vals := make([]string, 0, len(k.shadows)+1)
+	if k.value != "" {
+		vals = append(vals, k.value)
+	}
+	for _, s := range k.shadows {
+		if s.value != "" {
+			vals = append(vals, s.value)
+		}
 	}
 	return vals
 }
@@ -781,10 +792,8 @@ func (k *Key) parseUint64s(strs []string, addInvalid, returnOnInvalid bool) ([]uint64, error) {
 	return vals, err
 }
 
-
 type Parser func(str string) (interface{}, error)
 
-
 // parseTimesFormat transforms strings to times in given format.
 func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) {
 	vals := make([]time.Time, 0, len(strs))
@@ -801,7 +810,6 @@ func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) {
 	return vals, err
 }
 
-
 // doParse transforms strings to different types
 func (k *Key) doParse(strs []string, addInvalid, returnOnInvalid bool, parser Parser) ([]interface{}, error) {
 	vals := make([]interface{}, 0, len(strs))
```
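
A sketch of how the shadow-value changes might be observed, assuming shadow keys are enabled via `ini.ShadowLoad`; the section and key names are invented, and the expected output is inferred from the hunks above rather than verified:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	data := []byte(`
[peers]
addr = 10.0.0.1
addr = 10.0.0.1
addr = 10.0.0.2
addr =
`)

	// ShadowLoad enables shadow (repeated) keys. By default duplicate shadow
	// values are collapsed; LoadOptions.AllowDuplicateShadowValues would keep them.
	cfg, err := ini.ShadowLoad(data)
	if err != nil {
		panic(err)
	}

	// Empty shadow values are skipped by ValueWithShadows after this change.
	fmt.Println(cfg.Section("peers").Key("addr").ValueWithShadows())
	// Expected (assumption based on the hunks above): [10.0.0.1 10.0.0.2]
}
```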

vendor/gopkg.in/ini.v1/parser.go (generated, vendored), 47 lines changed

```diff
@@ -131,7 +131,7 @@ func readKeyName(delimiters string, in []byte) (string, int, error) {
 	// Check if key name surrounded by quotes.
 	var keyQuote string
 	if line[0] == '"' {
-		if len(line) > 6 && string(line[0:3]) == `"""` {
+		if len(line) > 6 && line[0:3] == `"""` {
 			keyQuote = `"""`
 		} else {
 			keyQuote = `"`
@@ -164,6 +164,10 @@ func readKeyName(delimiters string, in []byte) (string, int, error) {
 	if endIdx < 0 {
 		return "", -1, ErrDelimiterNotFound{line}
 	}
+	if endIdx == 0 {
+		return "", -1, ErrEmptyKeyName{line}
+	}
+
 	return strings.TrimSpace(line[0:endIdx]), endIdx + 1, nil
 }
 
@@ -232,7 +236,7 @@ func (p *parser) readValue(in []byte, bufferSize int) (string, error) {
 	}
 
 	var valQuote string
-	if len(line) > 3 && string(line[0:3]) == `"""` {
+	if len(line) > 3 && line[0:3] == `"""` {
 		valQuote = `"""`
 	} else if line[0] == '`' {
 		valQuote = "`"
@@ -289,12 +293,8 @@ func (p *parser) readValue(in []byte, bufferSize int) (string, error) {
 		hasSurroundedQuote(line, '"')) && !p.options.PreserveSurroundedQuote {
 		line = line[1 : len(line)-1]
 	} else if len(valQuote) == 0 && p.options.UnescapeValueCommentSymbols {
-		if strings.Contains(line, `\;`) {
-			line = strings.Replace(line, `\;`, ";", -1)
-		}
-		if strings.Contains(line, `\#`) {
-			line = strings.Replace(line, `\#`, "#", -1)
-		}
+		line = strings.ReplaceAll(line, `\;`, ";")
+		line = strings.ReplaceAll(line, `\#`, "#")
 	} else if p.options.AllowPythonMultilineValues && lastChar == '\n' {
 		return p.readPythonMultilines(line, bufferSize)
 	}
@@ -306,15 +306,9 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, error) {
 	parserBufferPeekResult, _ := p.buf.Peek(bufferSize)
 	peekBuffer := bytes.NewBuffer(parserBufferPeekResult)
 
-	indentSize := 0
 	for {
 		peekData, peekErr := peekBuffer.ReadBytes('\n')
-		if peekErr != nil {
-			if peekErr == io.EOF {
-				p.debug("readPythonMultilines: io.EOF, peekData: %q, line: %q", string(peekData), line)
-				return line, nil
-			}
-
+		if peekErr != nil && peekErr != io.EOF {
 			p.debug("readPythonMultilines: failed to peek with error: %v", peekErr)
 			return "", peekErr
 		}
@@ -333,19 +327,6 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, error) {
 			return line, nil
 		}
 
-		// Determine indent size and line prefix.
-		currentIndentSize := len(peekMatches[1])
-		if indentSize < 1 {
-			indentSize = currentIndentSize
-			p.debug("readPythonMultilines: indent size is %d", indentSize)
-		}
-
-		// Make sure each line is indented at least as far as first line.
-		if currentIndentSize < indentSize {
-			p.debug("readPythonMultilines: end of value, current indent: %d, expected indent: %d, line: %q", currentIndentSize, indentSize, line)
-			return line, nil
-		}
-
 		// Advance the parser reader (buffer) in-sync with the peek buffer.
 		_, err := p.buf.Discard(len(peekData))
 		if err != nil {
@@ -353,8 +334,7 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, error) {
 			return "", err
 		}
 
-		// Handle indented empty line.
-		line += "\n" + peekMatches[1][indentSize:] + peekMatches[2]
+		line += "\n" + peekMatches[0]
 	}
 }
 
@@ -465,6 +445,8 @@ func (f *File) parse(reader io.Reader) (err error) {
 			// Reset auto-counter and comments
 			p.comment.Reset()
 			p.count = 1
+			// Nested values can't span sections
+			isLastValueEmpty = false
 
 			inUnparseableSection = false
 			for i := range f.options.UnparseableSections {
@@ -485,8 +467,9 @@ func (f *File) parse(reader io.Reader) (err error) {
 
 		kname, offset, err := readKeyName(f.options.KeyValueDelimiters, line)
 		if err != nil {
+			switch {
 			// Treat as boolean key when desired, and whole line is key name.
-			if IsErrDelimiterNotFound(err) {
+			case IsErrDelimiterNotFound(err):
 				switch {
 				case f.options.AllowBooleanKeys:
 					kname, err := p.readValue(line, parserBufferSize)
@@ -504,6 +487,8 @@ func (f *File) parse(reader io.Reader) (err error) {
 				case f.options.SkipUnrecognizableLines:
 					continue
 				}
+			case IsErrEmptyKeyName(err) && f.options.SkipUnrecognizableLines:
+				continue
 			}
 			return err
 		}
```
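
The Python-style multiline handling above drops the per-block indent tracking and keeps each continuation line as matched. A rough sketch of exercising `AllowPythonMultilineValues` (content invented; the exact whitespace preserved in the folded value is inferred from the hunk, not verified):

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	data := []byte(`[banner]
text = first line
    second line
    third line
`)

	cfg, err := ini.LoadSources(ini.LoadOptions{AllowPythonMultilineValues: true}, data)
	if err != nil {
		panic(err)
	}

	// The indented continuation lines are folded into the value of "text".
	fmt.Printf("%q\n", cfg.Section("banner").Key("text").String())
}
```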

vendor/gopkg.in/ini.v1/section.go (generated, vendored), 2 lines changed

```diff
@@ -217,7 +217,7 @@ func (s *Section) KeysHash() map[string]string {
 		defer s.f.lock.RUnlock()
 	}
 
-	hash := map[string]string{}
+	hash := make(map[string]string, len(s.keysHash))
 	for key, value := range s.keysHash {
 		hash[key] = value
 	}
```
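
`KeysHash` now preallocates the copied map; usage is unchanged. A tiny sketch with invented keys:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	cfg, err := ini.Load([]byte("[db]\nhost = localhost\nport = 5432\n"))
	if err != nil {
		panic(err)
	}

	// KeysHash returns a copy of the section's key/value pairs;
	// mutating the returned map does not touch the parsed file.
	for k, v := range cfg.Section("db").KeysHash() {
		fmt.Printf("%s=%s\n", k, v)
	}
}
```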

vendor/modules.txt (vendored), 27 lines changed

```diff
@@ -1,18 +1,28 @@
-# github.com/antchfx/xmlquery v1.3.3
+# github.com/antchfx/xmlquery v1.4.1
+## explicit; go 1.14
 github.com/antchfx/xmlquery
-# github.com/antchfx/xpath v1.1.11
+# github.com/antchfx/xpath v1.3.1
+## explicit; go 1.14
 github.com/antchfx/xpath
-# github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e
+# github.com/davecgh/go-spew v1.1.1
+## explicit
+# github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
+## explicit
 github.com/golang/groupcache/lru
-# github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab
+# github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c
+## explicit
 github.com/influxdata/influxdb1-client/models
 github.com/influxdata/influxdb1-client/pkg/escape
 github.com/influxdata/influxdb1-client/v2
-# golang.org/x/net v0.0.0-20210119194325-5f4716e94777
+# github.com/stretchr/testify v1.7.0
+## explicit; go 1.13
+# golang.org/x/net v0.28.0
+## explicit; go 1.18
 golang.org/x/net/html
 golang.org/x/net/html/atom
 golang.org/x/net/html/charset
-# golang.org/x/text v0.3.5
+# golang.org/x/text v0.17.0
+## explicit; go 1.18
 golang.org/x/text/encoding
 golang.org/x/text/encoding/charmap
 golang.org/x/text/encoding/htmlindex
@@ -30,5 +40,8 @@ golang.org/x/text/internal/utf8internal
 golang.org/x/text/language
 golang.org/x/text/runes
 golang.org/x/text/transform
-# gopkg.in/ini.v1 v1.62.0
+# gopkg.in/ini.v1 v1.67.0
+## explicit
 gopkg.in/ini.v1
+# gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
+## explicit
```