go mod tidy & go mod vendor
parent cd21a3859d
commit 9051591899

go.mod (10 changes)
@@ -39,7 +39,8 @@ require (
 	github.com/golang/mock v1.3.1
 	github.com/golang/protobuf v1.3.2
 	github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db // indirect
-	github.com/google/go-cmp v0.3.0
+	github.com/google/btree v1.0.0 // indirect
+	github.com/google/go-cmp v0.3.1
 	github.com/google/uuid v1.1.1
 	github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968
 	github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01 // indirect
@@ -53,7 +54,7 @@ require (
 	github.com/hashicorp/errwrap v1.0.0
 	github.com/hashicorp/go-azure-helpers v0.0.0-20190129193224-166dfd221bb2
 	github.com/hashicorp/go-checkpoint v0.5.0
-	github.com/hashicorp/go-cleanhttp v0.5.0
+	github.com/hashicorp/go-cleanhttp v0.5.1
 	github.com/hashicorp/go-getter v1.4.0
 	github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f
 	github.com/hashicorp/go-immutable-radix v0.0.0-20180129170900-7f3cd4390caa // indirect
@@ -65,7 +66,7 @@ require (
 	github.com/hashicorp/go-sockaddr v0.0.0-20180320115054-6d291a969b86 // indirect
 	github.com/hashicorp/go-tfe v0.3.23
 	github.com/hashicorp/go-uuid v1.0.1
-	github.com/hashicorp/go-version v1.1.0
+	github.com/hashicorp/go-version v1.2.0
 	github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f
 	github.com/hashicorp/hcl/v2 v2.0.0
 	github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
@@ -73,6 +74,7 @@ require (
 	github.com/hashicorp/memberlist v0.1.0 // indirect
 	github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb // indirect
 	github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4
+	github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596
 	github.com/hashicorp/vault v0.10.4
 	github.com/jonboulle/clockwork v0.1.0 // indirect
 	github.com/joyent/triton-go v0.0.0-20180313100802-d8f9c0314926
@@ -124,7 +126,7 @@ require (
 	go.uber.org/multierr v1.1.0 // indirect
 	go.uber.org/zap v1.9.1 // indirect
 	golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
-	golang.org/x/net v0.0.0-20190620200207-3b0461eec859
+	golang.org/x/net v0.0.0-20191009170851-d66e71096ffb
 	golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
 	golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa
 	google.golang.org/api v0.9.0

go.sum (10 changes)

@@ -135,6 +135,8 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
 github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
 github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
@@ -173,6 +175,8 @@ github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3m
 github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg=
 github.com/hashicorp/go-cleanhttp v0.5.0 h1:wvCrVc9TjDls6+YGAF2hAifE1E5U1+b4tH6KdvN3Gig=
 github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
+github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
+github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
 github.com/hashicorp/go-getter v1.4.0 h1:ENHNi8494porjD0ZhIrjlAHnveSFhY7hvOJrV/fsKkw=
 github.com/hashicorp/go-getter v1.4.0/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY=
 github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
@@ -204,6 +208,8 @@ github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1
 github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
 github.com/hashicorp/go-version v1.1.0 h1:bPIoEKD27tNdebFGGxxYwcL4nepeY4j1QP23PFRGzg0=
 github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=
+github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
 github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
@@ -223,6 +229,8 @@ github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb h1:ZbgmOQt8DOg796fi
 github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE=
 github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4 h1:fTkL0YwjohGyN7AqsDhz6bwcGBpT+xBqi3Qhpw58Juw=
 github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4/go.mod h1:JDmizlhaP5P0rYTTZB0reDMefAiJyfWPEtugV4in1oI=
+github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjyO2JsNZUKT1ym+FAdlBEkGPevazYsmVgIMw7dVELg=
+github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg=
 github.com/hashicorp/vault v0.10.4 h1:4x0lHxui/ZRp/B3E0Auv1QNBJpzETqHR2kQD3mHSBJU=
 github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0=
 github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M=
@@ -437,6 +445,8 @@ golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn
 golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191009170851-d66e71096ffb h1:TR699M2v0qoKTOHxeLgp6zPqaQNs74f01a/ob9W0qko=
+golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=

vendor/github.com/apparentlymart/go-textseg/textseg/make_tables.go (new file, generated, vendored, 307 lines)

@@ -0,0 +1,307 @@
|
||||||
|
// Copyright (c) 2014 Couchbase, Inc.
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||||
|
// except in compliance with the License. You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
// Unless required by applicable law or agreed to in writing, software distributed under the
|
||||||
|
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||||
|
// either express or implied. See the License for the specific language governing permissions
|
||||||
|
// and limitations under the License.
|
||||||
|
|
||||||
|
// Modified by Martin Atkins to serve the needs of package textseg.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
var url = flag.String("url",
|
||||||
|
"http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/",
|
||||||
|
"URL of Unicode database directory")
|
||||||
|
var verbose = flag.Bool("verbose",
|
||||||
|
false,
|
||||||
|
"write data to stdout as it is parsed")
|
||||||
|
var localFiles = flag.Bool("local",
|
||||||
|
false,
|
||||||
|
"data files have been copied to the current directory; for debugging only")
|
||||||
|
var outputFile = flag.String("output",
|
||||||
|
"",
|
||||||
|
"output file for generated tables; default stdout")
|
||||||
|
|
||||||
|
var output *bufio.Writer
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
setupOutput()
|
||||||
|
|
||||||
|
graphemePropertyRanges := make(map[string]*unicode.RangeTable)
|
||||||
|
loadUnicodeData("GraphemeBreakProperty.txt", graphemePropertyRanges)
|
||||||
|
wordPropertyRanges := make(map[string]*unicode.RangeTable)
|
||||||
|
loadUnicodeData("WordBreakProperty.txt", wordPropertyRanges)
|
||||||
|
sentencePropertyRanges := make(map[string]*unicode.RangeTable)
|
||||||
|
loadUnicodeData("SentenceBreakProperty.txt", sentencePropertyRanges)
|
||||||
|
|
||||||
|
fmt.Fprintf(output, fileHeader, *url)
|
||||||
|
generateTables("Grapheme", graphemePropertyRanges)
|
||||||
|
generateTables("Word", wordPropertyRanges)
|
||||||
|
generateTables("Sentence", sentencePropertyRanges)
|
||||||
|
|
||||||
|
flushOutput()
|
||||||
|
}
|
||||||
|
|
||||||
|
// WordBreakProperty.txt has the form:
|
||||||
|
// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD
|
||||||
|
// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ
|
||||||
|
func openReader(file string) (input io.ReadCloser) {
|
||||||
|
if *localFiles {
|
||||||
|
f, err := os.Open(file)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
input = f
|
||||||
|
} else {
|
||||||
|
path := *url + file
|
||||||
|
resp, err := http.Get(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
if resp.StatusCode != 200 {
|
||||||
|
log.Fatal("bad GET status for "+file, resp.Status)
|
||||||
|
}
|
||||||
|
input = resp.Body
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadUnicodeData(filename string, propertyRanges map[string]*unicode.RangeTable) {
|
||||||
|
f := openReader(filename)
|
||||||
|
defer f.Close()
|
||||||
|
bufioReader := bufio.NewReader(f)
|
||||||
|
line, err := bufioReader.ReadString('\n')
|
||||||
|
for err == nil {
|
||||||
|
parseLine(line, propertyRanges)
|
||||||
|
line, err = bufioReader.ReadString('\n')
|
||||||
|
}
|
||||||
|
// if the err was EOF still need to process last value
|
||||||
|
if err == io.EOF {
|
||||||
|
parseLine(line, propertyRanges)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const comment = "#"
|
||||||
|
const sep = ";"
|
||||||
|
const rnge = ".."
|
||||||
|
|
||||||
|
func parseLine(line string, propertyRanges map[string]*unicode.RangeTable) {
|
||||||
|
if strings.HasPrefix(line, comment) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if len(line) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
commentStart := strings.Index(line, comment)
|
||||||
|
if commentStart > 0 {
|
||||||
|
line = line[0:commentStart]
|
||||||
|
}
|
||||||
|
pieces := strings.Split(line, sep)
|
||||||
|
if len(pieces) != 2 {
|
||||||
|
log.Printf("unexpected %d pieces in %s", len(pieces), line)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
propertyName := strings.TrimSpace(pieces[1])
|
||||||
|
|
||||||
|
rangeTable, ok := propertyRanges[propertyName]
|
||||||
|
if !ok {
|
||||||
|
rangeTable = &unicode.RangeTable{
|
||||||
|
LatinOffset: 0,
|
||||||
|
}
|
||||||
|
propertyRanges[propertyName] = rangeTable
|
||||||
|
}
|
||||||
|
|
||||||
|
codepointRange := strings.TrimSpace(pieces[0])
|
||||||
|
rngeIndex := strings.Index(codepointRange, rnge)
|
||||||
|
|
||||||
|
if rngeIndex < 0 {
|
||||||
|
// single codepoint, not range
|
||||||
|
codepointInt, err := strconv.ParseUint(codepointRange, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("error parsing int: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if codepointInt < 0x10000 {
|
||||||
|
r16 := unicode.Range16{
|
||||||
|
Lo: uint16(codepointInt),
|
||||||
|
Hi: uint16(codepointInt),
|
||||||
|
Stride: 1,
|
||||||
|
}
|
||||||
|
addR16ToTable(rangeTable, r16)
|
||||||
|
} else {
|
||||||
|
r32 := unicode.Range32{
|
||||||
|
Lo: uint32(codepointInt),
|
||||||
|
Hi: uint32(codepointInt),
|
||||||
|
Stride: 1,
|
||||||
|
}
|
||||||
|
addR32ToTable(rangeTable, r32)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
rngeStart := codepointRange[0:rngeIndex]
|
||||||
|
rngeEnd := codepointRange[rngeIndex+2:]
|
||||||
|
rngeStartInt, err := strconv.ParseUint(rngeStart, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("error parsing int: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
rngeEndInt, err := strconv.ParseUint(rngeEnd, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("error parsing int: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if rngeStartInt < 0x10000 && rngeEndInt < 0x10000 {
|
||||||
|
r16 := unicode.Range16{
|
||||||
|
Lo: uint16(rngeStartInt),
|
||||||
|
Hi: uint16(rngeEndInt),
|
||||||
|
Stride: 1,
|
||||||
|
}
|
||||||
|
addR16ToTable(rangeTable, r16)
|
||||||
|
} else if rngeStartInt >= 0x10000 && rngeEndInt >= 0x10000 {
|
||||||
|
r32 := unicode.Range32{
|
||||||
|
Lo: uint32(rngeStartInt),
|
||||||
|
Hi: uint32(rngeEndInt),
|
||||||
|
Stride: 1,
|
||||||
|
}
|
||||||
|
addR32ToTable(rangeTable, r32)
|
||||||
|
} else {
|
||||||
|
log.Printf("unexpected range")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func addR16ToTable(r *unicode.RangeTable, r16 unicode.Range16) {
|
||||||
|
if r.R16 == nil {
|
||||||
|
r.R16 = make([]unicode.Range16, 0, 1)
|
||||||
|
}
|
||||||
|
r.R16 = append(r.R16, r16)
|
||||||
|
if r16.Hi <= unicode.MaxLatin1 {
|
||||||
|
r.LatinOffset++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func addR32ToTable(r *unicode.RangeTable, r32 unicode.Range32) {
|
||||||
|
if r.R32 == nil {
|
||||||
|
r.R32 = make([]unicode.Range32, 0, 1)
|
||||||
|
}
|
||||||
|
r.R32 = append(r.R32, r32)
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateTables(prefix string, propertyRanges map[string]*unicode.RangeTable) {
|
||||||
|
prNames := make([]string, 0, len(propertyRanges))
|
||||||
|
for k := range propertyRanges {
|
||||||
|
prNames = append(prNames, k)
|
||||||
|
}
|
||||||
|
sort.Strings(prNames)
|
||||||
|
for _, key := range prNames {
|
||||||
|
rt := propertyRanges[key]
|
||||||
|
fmt.Fprintf(output, "var _%s%s = %s\n", prefix, key, generateRangeTable(rt))
|
||||||
|
}
|
||||||
|
fmt.Fprintf(output, "type _%sRuneRange unicode.RangeTable\n", prefix)
|
||||||
|
|
||||||
|
fmt.Fprintf(output, "func _%sRuneType(r rune) *_%sRuneRange {\n", prefix, prefix)
|
||||||
|
fmt.Fprintf(output, "\tswitch {\n")
|
||||||
|
for _, key := range prNames {
|
||||||
|
fmt.Fprintf(output, "\tcase unicode.Is(_%s%s, r):\n\t\treturn (*_%sRuneRange)(_%s%s)\n", prefix, key, prefix, prefix, key)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(output, "\tdefault:\n\t\treturn nil\n")
|
||||||
|
fmt.Fprintf(output, "\t}\n")
|
||||||
|
fmt.Fprintf(output, "}\n")
|
||||||
|
|
||||||
|
fmt.Fprintf(output, "func (rng *_%sRuneRange) String() string {\n", prefix)
|
||||||
|
fmt.Fprintf(output, "\tswitch (*unicode.RangeTable)(rng) {\n")
|
||||||
|
for _, key := range prNames {
|
||||||
|
fmt.Fprintf(output, "\tcase _%s%s:\n\t\treturn %q\n", prefix, key, key)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(output, "\tdefault:\n\t\treturn \"Other\"\n")
|
||||||
|
fmt.Fprintf(output, "\t}\n")
|
||||||
|
fmt.Fprintf(output, "}\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateRangeTable(rt *unicode.RangeTable) string {
|
||||||
|
rv := "&unicode.RangeTable{\n"
|
||||||
|
if rt.R16 != nil {
|
||||||
|
rv += "\tR16: []unicode.Range16{\n"
|
||||||
|
for _, r16 := range rt.R16 {
|
||||||
|
rv += fmt.Sprintf("\t\t%#v,\n", r16)
|
||||||
|
}
|
||||||
|
rv += "\t},\n"
|
||||||
|
}
|
||||||
|
if rt.R32 != nil {
|
||||||
|
rv += "\tR32: []unicode.Range32{\n"
|
||||||
|
for _, r32 := range rt.R32 {
|
||||||
|
rv += fmt.Sprintf("\t\t%#v,\n", r32)
|
||||||
|
}
|
||||||
|
rv += "\t},\n"
|
||||||
|
}
|
||||||
|
rv += fmt.Sprintf("\t\tLatinOffset: %d,\n", rt.LatinOffset)
|
||||||
|
rv += "}\n"
|
||||||
|
return rv
|
||||||
|
}
|
||||||
|
|
||||||
|
const fileHeader = `// Generated by running
|
||||||
|
// maketables --url=%s
|
||||||
|
// DO NOT EDIT
|
||||||
|
|
||||||
|
package textseg
|
||||||
|
|
||||||
|
import(
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
`
|
||||||
|
|
||||||
|
func setupOutput() {
|
||||||
|
output = bufio.NewWriter(startGofmt())
|
||||||
|
}
|
||||||
|
|
||||||
|
// startGofmt connects output to a gofmt process if -output is set.
|
||||||
|
func startGofmt() io.Writer {
|
||||||
|
if *outputFile == "" {
|
||||||
|
return os.Stdout
|
||||||
|
}
|
||||||
|
stdout, err := os.Create(*outputFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
// Pipe output to gofmt.
|
||||||
|
gofmt := exec.Command("gofmt")
|
||||||
|
fd, err := gofmt.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
gofmt.Stdout = stdout
|
||||||
|
gofmt.Stderr = os.Stderr
|
||||||
|
err = gofmt.Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return fd
|
||||||
|
}
|
||||||
|
|
||||||
|
func flushOutput() {
|
||||||
|
err := output.Flush()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|

vendor/github.com/apparentlymart/go-textseg/textseg/make_test_tables.go (new file, generated, vendored, 212 lines)

@@ -0,0 +1,212 @@
|
||||||
|
// Copyright (c) 2014 Couchbase, Inc.
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||||
|
// except in compliance with the License. You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
// Unless required by applicable law or agreed to in writing, software distributed under the
|
||||||
|
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||||
|
// either express or implied. See the License for the specific language governing permissions
|
||||||
|
// and limitations under the License.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
var url = flag.String("url",
|
||||||
|
"http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/",
|
||||||
|
"URL of Unicode database directory")
|
||||||
|
var verbose = flag.Bool("verbose",
|
||||||
|
false,
|
||||||
|
"write data to stdout as it is parsed")
|
||||||
|
var localFiles = flag.Bool("local",
|
||||||
|
false,
|
||||||
|
"data files have been copied to the current directory; for debugging only")
|
||||||
|
|
||||||
|
var outputFile = flag.String("output",
|
||||||
|
"",
|
||||||
|
"output file for generated tables; default stdout")
|
||||||
|
|
||||||
|
var output *bufio.Writer
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
setupOutput()
|
||||||
|
|
||||||
|
graphemeTests := make([]test, 0)
|
||||||
|
graphemeTests = loadUnicodeData("GraphemeBreakTest.txt", graphemeTests)
|
||||||
|
wordTests := make([]test, 0)
|
||||||
|
wordTests = loadUnicodeData("WordBreakTest.txt", wordTests)
|
||||||
|
sentenceTests := make([]test, 0)
|
||||||
|
sentenceTests = loadUnicodeData("SentenceBreakTest.txt", sentenceTests)
|
||||||
|
|
||||||
|
fmt.Fprintf(output, fileHeader, *url)
|
||||||
|
generateTestTables("Grapheme", graphemeTests)
|
||||||
|
generateTestTables("Word", wordTests)
|
||||||
|
generateTestTables("Sentence", sentenceTests)
|
||||||
|
|
||||||
|
flushOutput()
|
||||||
|
}
|
||||||
|
|
||||||
|
// WordBreakProperty.txt has the form:
|
||||||
|
// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD
|
||||||
|
// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ
|
||||||
|
func openReader(file string) (input io.ReadCloser) {
|
||||||
|
if *localFiles {
|
||||||
|
f, err := os.Open(file)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
input = f
|
||||||
|
} else {
|
||||||
|
path := *url + file
|
||||||
|
resp, err := http.Get(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
if resp.StatusCode != 200 {
|
||||||
|
log.Fatal("bad GET status for "+file, resp.Status)
|
||||||
|
}
|
||||||
|
input = resp.Body
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadUnicodeData(filename string, tests []test) []test {
|
||||||
|
f := openReader(filename)
|
||||||
|
defer f.Close()
|
||||||
|
bufioReader := bufio.NewReader(f)
|
||||||
|
line, err := bufioReader.ReadString('\n')
|
||||||
|
for err == nil {
|
||||||
|
tests = parseLine(line, tests)
|
||||||
|
line, err = bufioReader.ReadString('\n')
|
||||||
|
}
|
||||||
|
// if the err was EOF still need to process last value
|
||||||
|
if err == io.EOF {
|
||||||
|
tests = parseLine(line, tests)
|
||||||
|
}
|
||||||
|
return tests
|
||||||
|
}
|
||||||
|
|
||||||
|
const comment = "#"
|
||||||
|
const brk = "÷"
|
||||||
|
const nbrk = "×"
|
||||||
|
|
||||||
|
type test [][]byte
|
||||||
|
|
||||||
|
func parseLine(line string, tests []test) []test {
|
||||||
|
if strings.HasPrefix(line, comment) {
|
||||||
|
return tests
|
||||||
|
}
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if len(line) == 0 {
|
||||||
|
return tests
|
||||||
|
}
|
||||||
|
commentStart := strings.Index(line, comment)
|
||||||
|
if commentStart > 0 {
|
||||||
|
line = line[0:commentStart]
|
||||||
|
}
|
||||||
|
pieces := strings.Split(line, brk)
|
||||||
|
t := make(test, 0)
|
||||||
|
for _, piece := range pieces {
|
||||||
|
piece = strings.TrimSpace(piece)
|
||||||
|
if len(piece) > 0 {
|
||||||
|
codePoints := strings.Split(piece, nbrk)
|
||||||
|
word := ""
|
||||||
|
for _, codePoint := range codePoints {
|
||||||
|
codePoint = strings.TrimSpace(codePoint)
|
||||||
|
r, err := strconv.ParseInt(codePoint, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("err: %v for '%s'", err, string(r))
|
||||||
|
return tests
|
||||||
|
}
|
||||||
|
|
||||||
|
word += string(r)
|
||||||
|
}
|
||||||
|
t = append(t, []byte(word))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tests = append(tests, t)
|
||||||
|
return tests
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateTestTables(prefix string, tests []test) {
|
||||||
|
fmt.Fprintf(output, testHeader, prefix)
|
||||||
|
for _, t := range tests {
|
||||||
|
fmt.Fprintf(output, "\t\t{\n")
|
||||||
|
fmt.Fprintf(output, "\t\t\tinput: %#v,\n", bytes.Join(t, []byte{}))
|
||||||
|
fmt.Fprintf(output, "\t\t\toutput: %s,\n", generateTest(t))
|
||||||
|
fmt.Fprintf(output, "\t\t},\n")
|
||||||
|
}
|
||||||
|
fmt.Fprintf(output, "}\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateTest(t test) string {
|
||||||
|
rv := "[][]byte{"
|
||||||
|
for _, te := range t {
|
||||||
|
rv += fmt.Sprintf("%#v,", te)
|
||||||
|
}
|
||||||
|
rv += "}"
|
||||||
|
return rv
|
||||||
|
}
|
||||||
|
|
||||||
|
const fileHeader = `// Generated by running
|
||||||
|
// maketesttables --url=%s
|
||||||
|
// DO NOT EDIT
|
||||||
|
|
||||||
|
package textseg
|
||||||
|
`
|
||||||
|
|
||||||
|
const testHeader = `var unicode%sTests = []struct {
|
||||||
|
input []byte
|
||||||
|
output [][]byte
|
||||||
|
}{
|
||||||
|
`
|
||||||
|
|
||||||
|
func setupOutput() {
|
||||||
|
output = bufio.NewWriter(startGofmt())
|
||||||
|
}
|
||||||
|
|
||||||
|
// startGofmt connects output to a gofmt process if -output is set.
|
||||||
|
func startGofmt() io.Writer {
|
||||||
|
if *outputFile == "" {
|
||||||
|
return os.Stdout
|
||||||
|
}
|
||||||
|
stdout, err := os.Create(*outputFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
// Pipe output to gofmt.
|
||||||
|
gofmt := exec.Command("gofmt")
|
||||||
|
fd, err := gofmt.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
gofmt.Stdout = stdout
|
||||||
|
gofmt.Stderr = os.Stderr
|
||||||
|
err = gofmt.Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return fd
|
||||||
|
}
|
||||||
|
|
||||||
|
func flushOutput() {
|
||||||
|
err := output.Flush()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
|
@@ -19,7 +19,7 @@ func SortKeys(vs []reflect.Value) []reflect.Value {
 	}
 
 	// Sort the map keys.
-	sort.Slice(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) })
+	sort.SliceStable(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) })
 
 	// Deduplicate keys (fails for NaNs).
 	vs2 := vs[:1]
@@ -42,6 +42,8 @@ func isLess(x, y reflect.Value) bool {
 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
 		return x.Uint() < y.Uint()
 	case reflect.Float32, reflect.Float64:
+		// NOTE: This does not sort -0 as less than +0
+		// since Go maps treat -0 and +0 as equal keys.
 		fx, fy := x.Float(), y.Float()
 		return fx < fy || math.IsNaN(fx) && !math.IsNaN(fy)
 	case reflect.Complex64, reflect.Complex128:
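
The switch from sort.Slice to sort.SliceStable matters when several keys compare as "equal" under the comparator shown above (notably NaN keys, which are neither less than nor greater than one another): a stable sort keeps their relative order deterministic. A minimal standalone sketch, independent of go-cmp's internal types; the sample values and the package main wrapper are illustrative, and the comparator simply mirrors the isLess logic from the hunk:

package main

import (
	"fmt"
	"math"
	"sort"
)

func main() {
	// Mirror of the float branch of isLess: NaN sorts before numbers, and
	// two NaNs compare as equal, so only a stable sort keeps their order.
	less := func(x, y float64) bool {
		return x < y || math.IsNaN(x) && !math.IsNaN(y)
	}

	keys := []float64{math.NaN(), 2.0, math.NaN(), 1.0}
	sort.SliceStable(keys, func(i, j int) bool { return less(keys[i], keys[j]) })
	fmt.Println(keys) // NaN keys first, in their original relative order, then 1, 2
}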

@@ -4,7 +4,10 @@
 
 package value
 
-import "reflect"
+import (
+	"math"
+	"reflect"
+)
 
 // IsZero reports whether v is the zero value.
 // This does not rely on Interface and so can be used on unexported fields.
@@ -17,9 +20,9 @@ func IsZero(v reflect.Value) bool {
 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
 		return v.Uint() == 0
 	case reflect.Float32, reflect.Float64:
-		return v.Float() == 0
+		return math.Float64bits(v.Float()) == 0
 	case reflect.Complex64, reflect.Complex128:
-		return v.Complex() == 0
+		return math.Float64bits(real(v.Complex())) == 0 && math.Float64bits(imag(v.Complex())) == 0
 	case reflect.String:
 		return v.String() == ""
 	case reflect.UnsafePointer:
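
The IsZero change replaces an ordinary float comparison with a bit-level one: `v.Float() == 0` is also true for negative zero, while math.Float64bits distinguishes the two representations, so only +0.0 counts as the zero value. A small self-contained illustration of that distinction (not go-cmp's own code):

package main

import (
	"fmt"
	"math"
)

func main() {
	negZero := math.Copysign(0, -1)

	// Ordinary comparison treats -0.0 and +0.0 as equal...
	fmt.Println(negZero == 0) // true

	// ...but their bit patterns differ, so only +0.0 is the true zero value.
	fmt.Println(math.Float64bits(0) == 0)       // true
	fmt.Println(math.Float64bits(negZero) == 0) // false (sign bit is set)
}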

@@ -168,7 +168,7 @@ func (opts formatOptions) formatDiffList(recs []reportRecord, k reflect.Kind) te
 		var isZero bool
 		switch opts.DiffMode {
 		case diffIdentical:
-			isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueX)
+			isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueY)
 		case diffRemoved:
 			isZero = value.IsZero(r.Value.ValueX)
 		case diffInserted:

@@ -208,7 +208,6 @@ func (opts formatOptions) FormatValue(v reflect.Value, m visitedPointers) (out t
 func formatMapKey(v reflect.Value) string {
 	var opts formatOptions
 	opts.TypeMode = elideType
-	opts.AvoidStringer = true
 	opts.ShallowPointers = true
 	s := opts.FormatValue(v, visitedPointers{}).String()
 	return strings.TrimSpace(s)

@@ -90,7 +90,7 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
 			}
 			if r == '\n' {
 				if maxLineLen < i-lastLineIdx {
-					lastLineIdx = i - lastLineIdx
+					maxLineLen = i - lastLineIdx
 				}
 				lastLineIdx = i + 1
 				numLines++
@@ -322,7 +322,7 @@ func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStat
 		hadX, hadY := prev.NumRemoved > 0, prev.NumInserted > 0
 		hasX, hasY := next.NumRemoved > 0, next.NumInserted > 0
 		if ((hadX || hasX) && (hadY || hasY)) && curr.NumIdentical <= windowSize {
-			*prev = (*prev).Append(*curr).Append(*next)
+			*prev = prev.Append(*curr).Append(*next)
 			groups = groups[:len(groups)-1] // Truncate off equal group
 			continue
 		}

@@ -19,6 +19,11 @@ var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0
 type indentMode int
 
 func (n indentMode) appendIndent(b []byte, d diffMode) []byte {
+	// The output of Diff is documented as being unstable to provide future
+	// flexibility in changing the output for more humanly readable reports.
+	// This logic intentionally introduces instability to the exact output
+	// so that users can detect accidental reliance on stability early on,
+	// rather than much later when an actual change to the format occurs.
 	if flags.Deterministic || randBool {
 		// Use regular spaces (U+0020).
 		switch d {
@@ -360,7 +365,7 @@ func (s diffStats) String() string {
 	// Pluralize the name (adjusting for some obscure English grammar rules).
 	name := s.Name
 	if sum > 1 {
-		name = name + "s"
+		name += "s"
 		if strings.HasSuffix(name, "ys") {
 			name = name[:len(name)-2] + "ies" // e.g., "entrys" => "entries"
 		}

@@ -27,6 +27,7 @@ func PrintablePathCheckHandler(next http.Handler, input *HandlerInput) http.Hand
 	}
 
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		if r != nil {
 			// Check URL path for non-printable characters
 			idx := strings.IndexFunc(r.URL.Path, func(c rune) bool {
 				return !unicode.IsPrint(c)
@@ -37,7 +38,11 @@ func PrintablePathCheckHandler(next http.Handler, input *HandlerInput) http.Hand
 			return
 		}
 
+		if next != nil {
 			next.ServeHTTP(w, r)
+		}
+	}
 
 		return
 	})
 }
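
The go-cleanhttp change above makes PrintablePathCheckHandler tolerate a nil request and a nil next handler; the wrapper itself, per the signature in the hunk header, takes the handler to protect plus a *HandlerInput. A hedged usage sketch — the inner handler and the listen address are illustrative, and passing nil for the HandlerInput is assumed here to fall back to the package's default error status:

package main

import (
	"fmt"
	"log"
	"net/http"

	cleanhttp "github.com/hashicorp/go-cleanhttp"
)

func main() {
	inner := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})

	// Reject request paths containing non-printable characters before they
	// reach the inner handler.
	h := cleanhttp.PrintablePathCheckHandler(inner, nil)

	log.Fatal(http.ListenAndServe("127.0.0.1:8080", h))
}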

@@ -1,13 +1,13 @@
 language: go
 
 go:
-  - 1.0
-  - 1.1
   - 1.2
   - 1.3
   - 1.4
   - 1.9
   - "1.10"
+  - 1.11
+  - 1.12
 
 script:
   - go test

@@ -112,7 +112,7 @@ func Must(v *Version, err error) *Version {
 // or larger than the other version, respectively.
 //
 // If you want boolean results, use the LessThan, Equal,
-// or GreaterThan methods.
+// GreaterThan, GreaterThanOrEqual or LessThanOrEqual methods.
 func (v *Version) Compare(other *Version) int {
 	// A quick, efficient equality check
 	if v.String() == other.String() {
@@ -288,11 +288,21 @@ func (v *Version) GreaterThan(o *Version) bool {
 	return v.Compare(o) > 0
 }
 
+// GreaterThanOrEqualTo tests if this version is greater than or equal to another version.
+func (v *Version) GreaterThanOrEqual(o *Version) bool {
+	return v.Compare(o) >= 0
+}
+
 // LessThan tests if this version is less than another version.
 func (v *Version) LessThan(o *Version) bool {
 	return v.Compare(o) < 0
}
 
+// LessThanOrEqualTo tests if this version is less than or equal to another version.
+func (v *Version) LessThanOrEqual(o *Version) bool {
+	return v.Compare(o) <= 0
+}
+
 // Metadata returns any metadata that was part of the version
 // string.
 //
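
The new GreaterThanOrEqual and LessThanOrEqual helpers round out the boolean comparison methods that the updated Compare doc comment now lists. A short hedged usage sketch; the version strings are illustrative:

package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
)

func main() {
	v1 := version.Must(version.NewVersion("1.2.0"))
	v2 := version.Must(version.NewVersion("1.2.0"))

	// Compare returns -1, 0, or 1; the boolean helpers wrap it.
	fmt.Println(v1.Compare(v2))            // 0
	fmt.Println(v1.GreaterThanOrEqual(v2)) // true
	fmt.Println(v1.LessThanOrEqual(v2))    // true
}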

vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression_vars_gen.go (new file, generated, vendored, 99 lines)

@@ -0,0 +1,99 @@
|
||||||
|
// This is a 'go generate'-oriented program for producing the "Variables"
|
||||||
|
// method on every Expression implementation found within this package.
|
||||||
|
// All expressions share the same implementation for this method, which
|
||||||
|
// just wraps the package-level function "Variables" and uses an AST walk
|
||||||
|
// to do its work.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fs := token.NewFileSet()
|
||||||
|
pkgs, err := parser.ParseDir(fs, ".", nil, 0)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "error while parsing: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
pkg := pkgs["hclsyntax"]
|
||||||
|
|
||||||
|
// Walk all the files and collect the receivers of any "Value" methods
|
||||||
|
// that look like they are trying to implement Expression.
|
||||||
|
var recvs []string
|
||||||
|
for _, f := range pkg.Files {
|
||||||
|
for _, decl := range f.Decls {
|
||||||
|
fd, ok := decl.(*ast.FuncDecl)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if fd.Name.Name != "Value" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
results := fd.Type.Results.List
|
||||||
|
if len(results) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
valResult := fd.Type.Results.List[0].Type.(*ast.SelectorExpr).X.(*ast.Ident)
|
||||||
|
diagsResult := fd.Type.Results.List[1].Type.(*ast.SelectorExpr).X.(*ast.Ident)
|
||||||
|
|
||||||
|
if valResult.Name != "cty" && diagsResult.Name != "hcl" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have a method called Value and it returns something in
|
||||||
|
// "cty" followed by something in "hcl" then that's specific enough
|
||||||
|
// for now, even though this is not 100% exact as a correct
|
||||||
|
// implementation of Value.
|
||||||
|
|
||||||
|
recvTy := fd.Recv.List[0].Type
|
||||||
|
|
||||||
|
switch rtt := recvTy.(type) {
|
||||||
|
case *ast.StarExpr:
|
||||||
|
name := rtt.X.(*ast.Ident).Name
|
||||||
|
recvs = append(recvs, fmt.Sprintf("*%s", name))
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(os.Stderr, "don't know what to do with a %T receiver\n", recvTy)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Strings(recvs)
|
||||||
|
|
||||||
|
of, err := os.OpenFile("expression_vars.go", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "failed to open output file: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprint(of, outputPreamble)
|
||||||
|
for _, recv := range recvs {
|
||||||
|
fmt.Fprintf(of, outputMethodFmt, recv)
|
||||||
|
}
|
||||||
|
fmt.Fprint(of, "\n")
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
const outputPreamble = `package hclsyntax
|
||||||
|
|
||||||
|
// Generated by expression_vars_get.go. DO NOT EDIT.
|
||||||
|
// Run 'go generate' on this package to update the set of functions here.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/hashicorp/hcl/v2"
|
||||||
|
)`
|
||||||
|
|
||||||
|
const outputMethodFmt = `
|
||||||
|
|
||||||
|
func (e %s) Variables() []hcl.Traversal {
|
||||||
|
return Variables(e)
|
||||||
|
}`
|

vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go (new file, generated, vendored, 99 lines)

@@ -0,0 +1,99 @@
|
||||||
|
// This is a 'go generate'-oriented program for producing the "Variables"
|
||||||
|
// method on every Expression implementation found within this package.
|
||||||
|
// All expressions share the same implementation for this method, which
|
||||||
|
// just wraps the package-level function "Variables" and uses an AST walk
|
||||||
|
// to do its work.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fs := token.NewFileSet()
|
||||||
|
pkgs, err := parser.ParseDir(fs, ".", nil, 0)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "error while parsing: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
pkg := pkgs["hclsyntax"]
|
||||||
|
|
||||||
|
// Walk all the files and collect the receivers of any "Value" methods
|
||||||
|
// that look like they are trying to implement Expression.
|
||||||
|
var recvs []string
|
||||||
|
for _, f := range pkg.Files {
|
||||||
|
for _, decl := range f.Decls {
|
||||||
|
fd, ok := decl.(*ast.FuncDecl)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if fd.Name.Name != "Value" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
results := fd.Type.Results.List
|
||||||
|
if len(results) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
valResult := fd.Type.Results.List[0].Type.(*ast.SelectorExpr).X.(*ast.Ident)
|
||||||
|
diagsResult := fd.Type.Results.List[1].Type.(*ast.SelectorExpr).X.(*ast.Ident)
|
||||||
|
|
||||||
|
if valResult.Name != "cty" && diagsResult.Name != "hcl" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have a method called Value and it returns something in
|
||||||
|
// "cty" followed by something in "hcl" then that's specific enough
|
||||||
|
// for now, even though this is not 100% exact as a correct
|
||||||
|
// implementation of Value.
|
||||||
|
|
||||||
|
recvTy := fd.Recv.List[0].Type
|
||||||
|
|
||||||
|
switch rtt := recvTy.(type) {
|
||||||
|
case *ast.StarExpr:
|
||||||
|
name := rtt.X.(*ast.Ident).Name
|
||||||
|
recvs = append(recvs, fmt.Sprintf("*%s", name))
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(os.Stderr, "don't know what to do with a %T receiver\n", recvTy)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Strings(recvs)
|
||||||
|
|
||||||
|
of, err := os.OpenFile("expression_vars.go", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "failed to open output file: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprint(of, outputPreamble)
|
||||||
|
for _, recv := range recvs {
|
||||||
|
fmt.Fprintf(of, outputMethodFmt, recv)
|
||||||
|
}
|
||||||
|
fmt.Fprint(of, "\n")
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
const outputPreamble = `package hclsyntax
|
||||||
|
|
||||||
|
// Generated by expression_vars_get.go. DO NOT EDIT.
|
||||||
|
// Run 'go generate' on this package to update the set of functions here.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/hashicorp/hcl2/hcl"
|
||||||
|
)`
|
||||||
|
|
||||||
|
const outputMethodFmt = `
|
||||||
|
|
||||||
|
func (e %s) Variables() []hcl.Traversal {
|
||||||
|
return Variables(e)
|
||||||
|
}`
|
|

@@ -0,0 +1,61 @@
package auth

import (
	"github.com/hashicorp/terraform-svchost"
)

// CachingCredentialsSource creates a new credentials source that wraps another
// and caches its results in memory, on a per-hostname basis.
//
// No means is provided for expiration of cached credentials, so a caching
// credentials source should have a limited lifetime (one Terraform operation,
// for example) to ensure that time-limited credentials don't expire before
// their cache entries do.
func CachingCredentialsSource(source CredentialsSource) CredentialsSource {
	return &cachingCredentialsSource{
		source: source,
		cache:  map[svchost.Hostname]HostCredentials{},
	}
}

type cachingCredentialsSource struct {
	source CredentialsSource
	cache  map[svchost.Hostname]HostCredentials
}

// ForHost passes the given hostname on to the wrapped credentials source and
// caches the result to return for future requests with the same hostname.
//
// Both credentials and non-credentials (nil) responses are cached.
//
// No cache entry is created if the wrapped source returns an error, to allow
// the caller to retry the failing operation.
func (s *cachingCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
	if cache, cached := s.cache[host]; cached {
		return cache, nil
	}

	result, err := s.source.ForHost(host)
	if err != nil {
		return result, err
	}

	s.cache[host] = result
	return result, nil
}

func (s *cachingCredentialsSource) StoreForHost(host svchost.Hostname, credentials HostCredentialsWritable) error {
	// We'll delete the cache entry even if the store fails, since that just
	// means that the next read will go to the real store and get a chance to
	// see which object (old or new) is actually present.
	delete(s.cache, host)
	return s.source.StoreForHost(host, credentials)
}

func (s *cachingCredentialsSource) ForgetForHost(host svchost.Hostname) error {
	// We'll delete the cache entry even if the store fails, since that just
	// means that the next read will go to the real store and get a chance to
	// see if the object is still present.
	delete(s.cache, host)
	return s.source.ForgetForHost(host)
}
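
A hedged sketch of how the caching wrapper above might be used: wrap whatever underlying source is configured so that repeated lookups for the same hostname during one operation are answered from the in-memory map. The hostname literal, the direct Hostname conversion, and the choice of NoCredentials (defined in credentials.go below) as the wrapped source are illustrative only.

package main

import (
	"fmt"
	"log"

	svchost "github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/auth"
)

func main() {
	// Wrap an underlying source so repeated lookups for the same hostname
	// are served from the cache for the lifetime of this source.
	source := auth.CachingCredentialsSource(auth.NoCredentials)

	host := svchost.Hostname("app.terraform.io") // illustrative hostname
	creds, err := source.ForHost(host)           // first call consults the wrapped source
	if err != nil {
		log.Fatal(err)
	}
	creds, _ = source.ForHost(host) // second call is served from the cache
	fmt.Println(creds == nil)       // true: NoCredentials never yields credentials
}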

vendor/github.com/hashicorp/terraform-svchost/auth/credentials.go (new file, generated, vendored, 118 lines)

@@ -0,0 +1,118 @@
// Package auth contains types and functions to manage authentication
// credentials for service hosts.
package auth

import (
	"fmt"
	"net/http"

	"github.com/zclconf/go-cty/cty"

	"github.com/hashicorp/terraform-svchost"
)

// Credentials is a list of CredentialsSource objects that can be tried in
// turn until one returns credentials for a host, or one returns an error.
//
// A Credentials is itself a CredentialsSource, wrapping its members.
// In principle one CredentialsSource can be nested inside another, though
// there is no good reason to do so.
//
// The write operations on a Credentials are tried only on the first object,
// under the assumption that it is the primary store.
type Credentials []CredentialsSource

// NoCredentials is an empty CredentialsSource that always returns nil
// when asked for credentials.
var NoCredentials CredentialsSource = Credentials{}

// A CredentialsSource is an object that may be able to provide credentials
// for a given host.
//
// Credentials lookups are not guaranteed to be concurrency-safe. Callers
// using these facilities in concurrent code must use external concurrency
// primitives to prevent race conditions.
type CredentialsSource interface {
	// ForHost returns a non-nil HostCredentials if the source has credentials
	// available for the host, and a nil HostCredentials if it does not.
	//
	// If an error is returned, progress through a list of CredentialsSources
	// is halted and the error is returned to the user.
	ForHost(host svchost.Hostname) (HostCredentials, error)

	// StoreForHost takes a HostCredentialsWritable and saves it as the
	// credentials for the given host.
	//
	// If credentials are already stored for the given host, it will try to
	// replace those credentials but may produce an error if such replacement
	// is not possible.
	StoreForHost(host svchost.Hostname, credentials HostCredentialsWritable) error

	// ForgetForHost discards any stored credentials for the given host. It
	// does nothing and returns successfully if no credentials are saved
	// for that host.
	ForgetForHost(host svchost.Hostname) error
}

// HostCredentials represents a single set of credentials for a particular
// host.
type HostCredentials interface {
	// PrepareRequest modifies the given request in-place to apply the
	// receiving credentials. The usual behavior of this method is to
	// add some sort of Authorization header to the request.
	PrepareRequest(req *http.Request)

	// Token returns the authentication token.
	Token() string
}

// HostCredentialsWritable is an extension of HostCredentials for credentials
// objects that can be serialized as a JSON-compatible object value for
// storage.
type HostCredentialsWritable interface {
	HostCredentials

	// ToStore returns a cty.Value, always of an object type,
	// representing data that can be serialized to represent this object
	// in persistent storage.
	//
	// The resulting value may uses only cty values that can be accepted
	// by the cty JSON encoder, though the caller may elect to instead store
	// it in some other format that has a JSON-compatible type system.
	ToStore() cty.Value
}

// ForHost iterates over the contained CredentialsSource objects and
// tries to obtain credentials for the given host from each one in turn.
//
// If any source returns either a non-nil HostCredentials or a non-nil error
// then this result is returned. Otherwise, the result is nil, nil.
func (c Credentials) ForHost(host svchost.Hostname) (HostCredentials, error) {
	for _, source := range c {
		creds, err := source.ForHost(host)
		if creds != nil || err != nil {
			return creds, err
		}
	}
	return nil, nil
}

// StoreForHost passes the given arguments to the same operation on the
// first CredentialsSource in the receiver.
func (c Credentials) StoreForHost(host svchost.Hostname, credentials HostCredentialsWritable) error {
	if len(c) == 0 {
		return fmt.Errorf("no credentials store is available")
	}

	return c[0].StoreForHost(host, credentials)
}

// ForgetForHost passes the given arguments to the same operation on the
// first CredentialsSource in the receiver.
func (c Credentials) ForgetForHost(host svchost.Hostname) error {
	if len(c) == 0 {
		return fmt.Errorf("no credentials store is available")
	}

	return c[0].ForgetForHost(host)
}
|
@@ -0,0 +1,48 @@
package auth

import (
	"github.com/zclconf/go-cty/cty"
)

// HostCredentialsFromMap converts a map of key-value pairs from a credentials
// definition provided by the user (e.g. in a config file, or via a credentials
// helper) into a HostCredentials object if possible, or returns nil if
// no credentials could be extracted from the map.
//
// This function ignores map keys it is unfamiliar with, to allow for future
// expansion of the credentials map format for new credential types.
func HostCredentialsFromMap(m map[string]interface{}) HostCredentials {
	if m == nil {
		return nil
	}
	if token, ok := m["token"].(string); ok {
		return HostCredentialsToken(token)
	}
	return nil
}

// HostCredentialsFromObject converts a cty.Value of an object type into a
// HostCredentials object if possible, or returns nil if no credentials could
// be extracted from the map.
//
// This function ignores object attributes it is unfamiliar with, to allow for
// future expansion of the credentials object structure for new credential types.
//
// If the given value is not of an object type, this function will panic.
func HostCredentialsFromObject(obj cty.Value) HostCredentials {
	if !obj.Type().HasAttribute("token") {
		return nil
	}

	tokenV := obj.GetAttr("token")
	if tokenV.IsNull() || !tokenV.IsKnown() {
		return nil
	}
	if !cty.String.Equals(tokenV.Type()) {
		// Weird, but maybe some future Terraform version accepts an object
		// here for some reason, so we'll be resilient.
		return nil
	}

	return HostCredentialsToken(tokenV.AsString())
}
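An illustrative usage sketch, separate from the vendored diff itself: it relies only on the HostCredentialsFromMap function and the HostCredentialsToken behavior added in this commit; the token value and request URL are hypothetical.

package main

import (
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform-svchost/auth"
)

func main() {
	// A credentials map as a config file or credentials helper might supply it.
	creds := auth.HostCredentialsFromMap(map[string]interface{}{
		"token": "example-token", // hypothetical value
	})

	req, _ := http.NewRequest("GET", "https://app.terraform.io/", nil)
	creds.PrepareRequest(req) // sets "Authorization: Bearer example-token"
	fmt.Println(req.Header.Get("Authorization"))
}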
149 vendor/github.com/hashicorp/terraform-svchost/auth/helper_program.go generated vendored Normal file
@@ -0,0 +1,149 @@
package auth

import (
	"bytes"
	"encoding/json"
	"fmt"
	"os/exec"
	"path/filepath"

	ctyjson "github.com/zclconf/go-cty/cty/json"

	"github.com/hashicorp/terraform-svchost"
)

type helperProgramCredentialsSource struct {
	executable string
	args       []string
}

// HelperProgramCredentialsSource returns a CredentialsSource that runs the
// given program with the given arguments in order to obtain credentials.
//
// The given executable path must be an absolute path; it is the caller's
// responsibility to validate and process a relative path or other input
// provided by an end-user. If the given path is not absolute, this
// function will panic.
//
// When credentials are requested, the program will be run in a child process
// with the given arguments along with two additional arguments added to the
// end of the list: the literal string "get", followed by the requested
// hostname in ASCII compatibility form (punycode form).
func HelperProgramCredentialsSource(executable string, args ...string) CredentialsSource {
	if !filepath.IsAbs(executable) {
		panic("NewCredentialsSourceHelperProgram requires absolute path to executable")
	}

	fullArgs := make([]string, len(args)+1)
	fullArgs[0] = executable
	copy(fullArgs[1:], args)

	return &helperProgramCredentialsSource{
		executable: executable,
		args:       fullArgs,
	}
}

func (s *helperProgramCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
	args := make([]string, len(s.args), len(s.args)+2)
	copy(args, s.args)
	args = append(args, "get")
	args = append(args, string(host))

	outBuf := bytes.Buffer{}
	errBuf := bytes.Buffer{}

	cmd := exec.Cmd{
		Path:   s.executable,
		Args:   args,
		Stdin:  nil,
		Stdout: &outBuf,
		Stderr: &errBuf,
	}
	err := cmd.Run()
	if _, isExitErr := err.(*exec.ExitError); isExitErr {
		errText := errBuf.String()
		if errText == "" {
			// Shouldn't happen for a well-behaved helper program
			return nil, fmt.Errorf("error in %s, but it produced no error message", s.executable)
		}
		return nil, fmt.Errorf("error in %s: %s", s.executable, errText)
	} else if err != nil {
		return nil, fmt.Errorf("failed to run %s: %s", s.executable, err)
	}

	var m map[string]interface{}
	err = json.Unmarshal(outBuf.Bytes(), &m)
	if err != nil {
		return nil, fmt.Errorf("malformed output from %s: %s", s.executable, err)
	}

	return HostCredentialsFromMap(m), nil
}

func (s *helperProgramCredentialsSource) StoreForHost(host svchost.Hostname, credentials HostCredentialsWritable) error {
	args := make([]string, len(s.args), len(s.args)+2)
	copy(args, s.args)
	args = append(args, "store")
	args = append(args, string(host))

	toStore := credentials.ToStore()
	toStoreRaw, err := ctyjson.Marshal(toStore, toStore.Type())
	if err != nil {
		return fmt.Errorf("can't serialize credentials to store: %s", err)
	}

	inReader := bytes.NewReader(toStoreRaw)
	errBuf := bytes.Buffer{}

	cmd := exec.Cmd{
		Path:   s.executable,
		Args:   args,
		Stdin:  inReader,
		Stderr: &errBuf,
		Stdout: nil,
	}
	err = cmd.Run()
	if _, isExitErr := err.(*exec.ExitError); isExitErr {
		errText := errBuf.String()
		if errText == "" {
			// Shouldn't happen for a well-behaved helper program
			return fmt.Errorf("error in %s, but it produced no error message", s.executable)
		}
		return fmt.Errorf("error in %s: %s", s.executable, errText)
	} else if err != nil {
		return fmt.Errorf("failed to run %s: %s", s.executable, err)
	}

	return nil
}

func (s *helperProgramCredentialsSource) ForgetForHost(host svchost.Hostname) error {
	args := make([]string, len(s.args), len(s.args)+2)
	copy(args, s.args)
	args = append(args, "forget")
	args = append(args, string(host))

	errBuf := bytes.Buffer{}

	cmd := exec.Cmd{
		Path:   s.executable,
		Args:   args,
		Stdin:  nil,
		Stderr: &errBuf,
		Stdout: nil,
	}
	err := cmd.Run()
	if _, isExitErr := err.(*exec.ExitError); isExitErr {
		errText := errBuf.String()
		if errText == "" {
			// Shouldn't happen for a well-behaved helper program
			return fmt.Errorf("error in %s, but it produced no error message", s.executable)
		}
		return fmt.Errorf("error in %s: %s", s.executable, errText)
	} else if err != nil {
		return fmt.Errorf("failed to run %s: %s", s.executable, err)
	}

	return nil
}
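A usage sketch, not part of the diff: wiring the helper-program source into a lookup. The helper path is hypothetical; as documented above, the constructor panics unless the path is absolute, and the helper is invoked with "get" plus the punycode hostname appended to its argument list.

package main

import (
	"log"

	svchost "github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/auth"
)

func main() {
	// Runs: /usr/local/bin/terraform-credentials-example get app.terraform.io
	src := auth.HelperProgramCredentialsSource("/usr/local/bin/terraform-credentials-example")

	host, err := svchost.ForComparison("app.terraform.io")
	if err != nil {
		log.Fatal(err)
	}

	creds, err := src.ForHost(host)
	if err != nil {
		log.Fatal(err)
	}
	if creds != nil {
		log.Printf("helper returned a token of length %d", len(creds.Token()))
	}
}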
@@ -0,0 +1,38 @@
package auth

import (
	"fmt"

	"github.com/hashicorp/terraform-svchost"
)

// StaticCredentialsSource is a credentials source that retrieves credentials
// from the provided map. It returns nil if a requested hostname is not
// present in the map.
//
// The caller should not modify the given map after passing it to this function.
func StaticCredentialsSource(creds map[svchost.Hostname]map[string]interface{}) CredentialsSource {
	return staticCredentialsSource(creds)
}

type staticCredentialsSource map[svchost.Hostname]map[string]interface{}

func (s staticCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
	if s == nil {
		return nil, nil
	}

	if m, exists := s[host]; exists {
		return HostCredentialsFromMap(m), nil
	}

	return nil, nil
}

func (s staticCredentialsSource) StoreForHost(host svchost.Hostname, credentials HostCredentialsWritable) error {
	return fmt.Errorf("can't store new credentials in a static credentials source")
}

func (s staticCredentialsSource) ForgetForHost(host svchost.Hostname) error {
	return fmt.Errorf("can't discard credentials from a static credentials source")
}
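A brief sketch of the static source in use (illustrative only, not part of the vendored code); the hostname and token are assumptions, and it reuses the ForComparison helper from the svchost package added later in this commit.

package main

import (
	"fmt"

	svchost "github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/auth"
)

func main() {
	host, _ := svchost.ForComparison("app.terraform.io")
	src := auth.StaticCredentialsSource(map[svchost.Hostname]map[string]interface{}{
		host: {"token": "example-token"}, // hypothetical token
	})

	creds, _ := src.ForHost(host) // non-nil: the host is present in the map
	fmt.Println(creds != nil)

	// Writes and deletions are rejected by design for a static source:
	fmt.Println(src.StoreForHost(host, auth.HostCredentialsToken("x")))
	fmt.Println(src.ForgetForHost(host))
}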
43 vendor/github.com/hashicorp/terraform-svchost/auth/token_credentials.go generated vendored Normal file
@@ -0,0 +1,43 @@
package auth

import (
	"net/http"

	"github.com/zclconf/go-cty/cty"
)

// HostCredentialsToken is a HostCredentials implementation that represents a
// single "bearer token", to be sent to the server via an Authorization header
// with the auth type set to "Bearer".
//
// To save a token as the credentials for a host, convert the token string to
// this type and use the result as a HostCredentialsWritable implementation.
type HostCredentialsToken string

// Interface implementation assertions. Compilation will fail here if
// HostCredentialsToken does not fully implement these interfaces.
var _ HostCredentials = HostCredentialsToken("")
var _ HostCredentialsWritable = HostCredentialsToken("")

// PrepareRequest alters the given HTTP request by setting its Authorization
// header to the string "Bearer " followed by the encapsulated authentication
// token.
func (tc HostCredentialsToken) PrepareRequest(req *http.Request) {
	if req.Header == nil {
		req.Header = http.Header{}
	}
	req.Header.Set("Authorization", "Bearer "+string(tc))
}

// Token returns the authentication token.
func (tc HostCredentialsToken) Token() string {
	return string(tc)
}

// ToStore returns a credentials object with a single attribute "token" whose
// value is the token string.
func (tc HostCredentialsToken) ToStore() cty.Value {
	return cty.ObjectVal(map[string]cty.Value{
		"token": cty.StringVal(string(tc)),
	})
}
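A small sketch (outside the diff) of how a token credential round-trips through its storable form, using only the ToStore method above and the HostCredentialsFromObject function added earlier in this commit; the token string is hypothetical.

package main

import (
	"fmt"

	"github.com/hashicorp/terraform-svchost/auth"
)

func main() {
	tok := auth.HostCredentialsToken("example-token") // hypothetical token

	// ToStore produces a cty object value suitable for JSON-compatible storage...
	stored := tok.ToStore()

	// ...and HostCredentialsFromObject recovers an equivalent credential from it.
	again := auth.HostCredentialsFromObject(stored)
	fmt.Println(again.Token() == tok.Token()) // true
}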
@@ -0,0 +1,275 @@
// Package disco handles Terraform's remote service discovery protocol.
//
// This protocol allows mapping from a service hostname, as produced by the
// svchost package, to a set of services supported by that host and the
// endpoint information for each supported service.
package disco

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"mime"
	"net/http"
	"net/url"
	"time"

	"github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/auth"
)

const (
	// Fixed path to the discovery manifest.
	discoPath = "/.well-known/terraform.json"

	// Arbitrary-but-small number to prevent runaway redirect loops.
	maxRedirects = 3

	// Arbitrary-but-small time limit to prevent UI "hangs" during discovery.
	discoTimeout = 11 * time.Second

	// 1MB - to prevent abusive services from using loads of our memory.
	maxDiscoDocBytes = 1 * 1024 * 1024
)

// httpTransport is overridden during tests, to skip TLS verification.
var httpTransport = defaultHttpTransport()

// Disco is the main type in this package, which allows discovery on given
// hostnames and caches the results by hostname to avoid repeated requests
// for the same information.
type Disco struct {
	hostCache map[svchost.Hostname]*Host
	credsSrc  auth.CredentialsSource

	// Transport is a custom http.RoundTripper to use.
	Transport http.RoundTripper
}

// New returns a new initialized discovery object.
func New() *Disco {
	return NewWithCredentialsSource(nil)
}

// NewWithCredentialsSource returns a new discovery object initialized with
// the given credentials source.
func NewWithCredentialsSource(credsSrc auth.CredentialsSource) *Disco {
	return &Disco{
		hostCache: make(map[svchost.Hostname]*Host),
		credsSrc:  credsSrc,
		Transport: httpTransport,
	}
}

func (d *Disco) SetUserAgent(uaString string) {
	d.Transport = &userAgentRoundTripper{
		innerRt:   d.Transport,
		userAgent: uaString,
	}
}

// SetCredentialsSource provides a credentials source that will be used to
// add credentials to outgoing discovery requests, where available.
//
// If this method is never called, no outgoing discovery requests will have
// credentials.
func (d *Disco) SetCredentialsSource(src auth.CredentialsSource) {
	d.credsSrc = src
}

// CredentialsSource returns the credentials source associated with the receiver,
// or an empty credentials source if none is associated.
func (d *Disco) CredentialsSource() auth.CredentialsSource {
	if d.credsSrc == nil {
		// We'll return an empty one just to save the caller from having to
		// protect against the nil case, since this interface already allows
		// for the possibility of there being no credentials at all.
		return auth.StaticCredentialsSource(nil)
	}
	return d.credsSrc
}

// CredentialsForHost returns a non-nil HostCredentials if the embedded source has
// credentials available for the host, and a nil HostCredentials if it does not.
func (d *Disco) CredentialsForHost(hostname svchost.Hostname) (auth.HostCredentials, error) {
	if d.credsSrc == nil {
		return nil, nil
	}
	return d.credsSrc.ForHost(hostname)
}

// ForceHostServices provides a pre-defined set of services for a given
// host, which prevents the receiver from attempting network-based discovery
// for the given host. Instead, the given services map will be returned
// verbatim.
//
// When providing "forced" services, any relative URLs are resolved against
// the initial discovery URL that would have been used for network-based
// discovery, yielding the same results as if the given map were published
// at the host's default discovery URL, though using absolute URLs is strongly
// recommended to make the configured behavior more explicit.
func (d *Disco) ForceHostServices(hostname svchost.Hostname, services map[string]interface{}) {
	if services == nil {
		services = map[string]interface{}{}
	}

	d.hostCache[hostname] = &Host{
		discoURL: &url.URL{
			Scheme: "https",
			Host:   string(hostname),
			Path:   discoPath,
		},
		hostname:  hostname.ForDisplay(),
		services:  services,
		transport: d.Transport,
	}
}

// Discover runs the discovery protocol against the given hostname (which must
// already have been validated and prepared with svchost.ForComparison) and
// returns an object describing the services available at that host.
//
// If a given hostname supports no Terraform services at all, a non-nil but
// empty Host object is returned. When giving feedback to the end user about
// such situations, we say "host <name> does not provide a <service> service",
// regardless of whether that is due to that service specifically being absent
// or due to the host not providing Terraform services at all, since we don't
// wish to expose the detail of whole-host discovery to an end-user.
func (d *Disco) Discover(hostname svchost.Hostname) (*Host, error) {
	if host, cached := d.hostCache[hostname]; cached {
		return host, nil
	}

	host, err := d.discover(hostname)
	if err != nil {
		return nil, err
	}
	d.hostCache[hostname] = host

	return host, nil
}

// DiscoverServiceURL is a convenience wrapper for discovery on a given
// hostname and then looking up a particular service in the result.
func (d *Disco) DiscoverServiceURL(hostname svchost.Hostname, serviceID string) (*url.URL, error) {
	host, err := d.Discover(hostname)
	if err != nil {
		return nil, err
	}
	return host.ServiceURL(serviceID)
}

// discover implements the actual discovery process, with its result cached
// by the public-facing Discover method.
func (d *Disco) discover(hostname svchost.Hostname) (*Host, error) {
	discoURL := &url.URL{
		Scheme: "https",
		Host:   hostname.String(),
		Path:   discoPath,
	}

	client := &http.Client{
		Transport: d.Transport,
		Timeout:   discoTimeout,

		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			log.Printf("[DEBUG] Service discovery redirected to %s", req.URL)
			if len(via) > maxRedirects {
				return errors.New("too many redirects") // this error will never actually be seen
			}
			return nil
		},
	}

	req := &http.Request{
		Header: make(http.Header),
		Method: "GET",
		URL:    discoURL,
	}
	req.Header.Set("Accept", "application/json")

	creds, err := d.CredentialsForHost(hostname)
	if err != nil {
		log.Printf("[WARN] Failed to get credentials for %s: %s (ignoring)", hostname, err)
	}
	if creds != nil {
		// Update the request to include credentials.
		creds.PrepareRequest(req)
	}

	log.Printf("[DEBUG] Service discovery for %s at %s", hostname, discoURL)

	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("Failed to request discovery document: %v", err)
	}
	defer resp.Body.Close()

	host := &Host{
		// Use the discovery URL from resp.Request in
		// case the client followed any redirects.
		discoURL:  resp.Request.URL,
		hostname:  hostname.ForDisplay(),
		transport: d.Transport,
	}

	// Return the host without any services.
	if resp.StatusCode == 404 {
		return host, nil
	}

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("Failed to request discovery document: %s", resp.Status)
	}

	contentType := resp.Header.Get("Content-Type")
	mediaType, _, err := mime.ParseMediaType(contentType)
	if err != nil {
		return nil, fmt.Errorf("Discovery URL has a malformed Content-Type %q", contentType)
	}
	if mediaType != "application/json" {
		return nil, fmt.Errorf("Discovery URL returned an unsupported Content-Type %q", mediaType)
	}

	// This doesn't catch chunked encoding, because ContentLength is -1 in that case.
	if resp.ContentLength > maxDiscoDocBytes {
		// Size limit here is not a contractual requirement and so we may
		// adjust it over time if we find a different limit is warranted.
		return nil, fmt.Errorf(
			"Discovery doc response is too large (got %d bytes; limit %d)",
			resp.ContentLength, maxDiscoDocBytes,
		)
	}

	// If the response is using chunked encoding then we can't predict its
	// size, but we'll at least prevent reading the entire thing into memory.
	lr := io.LimitReader(resp.Body, maxDiscoDocBytes)

	servicesBytes, err := ioutil.ReadAll(lr)
	if err != nil {
		return nil, fmt.Errorf("Error reading discovery document body: %v", err)
	}

	var services map[string]interface{}
	err = json.Unmarshal(servicesBytes, &services)
	if err != nil {
		return nil, fmt.Errorf("Failed to decode discovery document as a JSON object: %v", err)
	}
	host.services = services

	return host, nil
}

// Forget invalidates any cached record of the given hostname. If the host
// has no cache entry then this is a no-op.
func (d *Disco) Forget(hostname svchost.Hostname) {
	delete(d.hostCache, hostname)
}

// ForgetAll is like Forget, but for all of the hostnames that have cache entries.
func (d *Disco) ForgetAll() {
	d.hostCache = make(map[svchost.Hostname]*Host)
}
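For orientation (illustrative only, not part of the vendored code): a typical discovery flow through this package, assuming the app.terraform.io hostname and the standard "modules.v1" service ID; the user agent string is made up.

package main

import (
	"log"

	svchost "github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/auth"
	"github.com/hashicorp/terraform-svchost/disco"
)

func main() {
	host, err := svchost.ForComparison("app.terraform.io")
	if err != nil {
		log.Fatal(err)
	}

	d := disco.NewWithCredentialsSource(auth.StaticCredentialsSource(nil))
	d.SetUserAgent("example-client/0.1") // hypothetical user agent

	// Fetches https://app.terraform.io/.well-known/terraform.json, caching the
	// result per hostname, then looks up the requested service in the manifest.
	u, err := d.DiscoverServiceURL(host, "modules.v1")
	if err != nil {
		log.Fatal(err)
	}
	log.Println(u.String())
}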
@@ -0,0 +1,412 @@
package disco

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
	"os"
	"strconv"
	"strings"
	"time"

	"github.com/hashicorp/go-version"
)

const versionServiceID = "versions.v1"

// Host represents a service discovered host.
type Host struct {
	discoURL  *url.URL
	hostname  string
	services  map[string]interface{}
	transport http.RoundTripper
}

// Constraints represents the version constraints of a service.
type Constraints struct {
	Service   string   `json:"service"`
	Product   string   `json:"product"`
	Minimum   string   `json:"minimum"`
	Maximum   string   `json:"maximum"`
	Excluding []string `json:"excluding"`
}

// ErrServiceNotProvided is returned when the service is not provided.
type ErrServiceNotProvided struct {
	hostname string
	service  string
}

// Error returns a customized error message.
func (e *ErrServiceNotProvided) Error() string {
	if e.hostname == "" {
		return fmt.Sprintf("host does not provide a %s service", e.service)
	}
	return fmt.Sprintf("host %s does not provide a %s service", e.hostname, e.service)
}

// ErrVersionNotSupported is returned when the version is not supported.
type ErrVersionNotSupported struct {
	hostname string
	service  string
	version  string
}

// Error returns a customized error message.
func (e *ErrVersionNotSupported) Error() string {
	if e.hostname == "" {
		return fmt.Sprintf("host does not support %s version %s", e.service, e.version)
	}
	return fmt.Sprintf("host %s does not support %s version %s", e.hostname, e.service, e.version)
}

// ErrNoVersionConstraints is returned when checkpoint was disabled
// or the endpoint to query for version constraints was unavailable.
type ErrNoVersionConstraints struct {
	disabled bool
}

// Error returns a customized error message.
func (e *ErrNoVersionConstraints) Error() string {
	if e.disabled {
		return "checkpoint disabled"
	}
	return "unable to contact versions service"
}

// ServiceURL returns the URL associated with the given service identifier,
// which should be of the form "servicename.vN".
//
// A non-nil result is always an absolute URL with a scheme of either HTTPS
// or HTTP.
func (h *Host) ServiceURL(id string) (*url.URL, error) {
	svc, ver, err := parseServiceID(id)
	if err != nil {
		return nil, err
	}

	// No services supported for an empty Host.
	if h == nil || h.services == nil {
		return nil, &ErrServiceNotProvided{service: svc}
	}

	urlStr, ok := h.services[id].(string)
	if !ok {
		// See if we have a matching service as that would indicate
		// the service is supported, but not the requested version.
		for serviceID := range h.services {
			if strings.HasPrefix(serviceID, svc+".") {
				return nil, &ErrVersionNotSupported{
					hostname: h.hostname,
					service:  svc,
					version:  ver.Original(),
				}
			}
		}

		// No discovered services match the requested service.
		return nil, &ErrServiceNotProvided{hostname: h.hostname, service: svc}
	}

	u, err := h.parseURL(urlStr)
	if err != nil {
		return nil, fmt.Errorf("Failed to parse service URL: %v", err)
	}

	return u, nil
}
// ServiceOAuthClient returns the OAuth client configuration associated with the
// given service identifier, which should be of the form "servicename.vN".
//
// This is an alternative to ServiceURL for unusual services that require
// a full OAuth2 client definition rather than just a URL. Use this only
// for services whose specification calls for this sort of definition.
func (h *Host) ServiceOAuthClient(id string) (*OAuthClient, error) {
	svc, ver, err := parseServiceID(id)
	if err != nil {
		return nil, err
	}

	// No services supported for an empty Host.
	if h == nil || h.services == nil {
		return nil, &ErrServiceNotProvided{service: svc}
	}

	if _, ok := h.services[id]; !ok {
		// See if we have a matching service as that would indicate
		// the service is supported, but not the requested version.
		for serviceID := range h.services {
			if strings.HasPrefix(serviceID, svc+".") {
				return nil, &ErrVersionNotSupported{
					hostname: h.hostname,
					service:  svc,
					version:  ver.Original(),
				}
			}
		}

		// No discovered services match the requested service.
		return nil, &ErrServiceNotProvided{hostname: h.hostname, service: svc}
	}

	var raw map[string]interface{}
	switch v := h.services[id].(type) {
	case map[string]interface{}:
		raw = v // Great!
	case []map[string]interface{}:
		// An absolutely infuriating legacy HCL ambiguity.
		raw = v[0]
	default:
		// Debug message because raw Go types don't belong in our UI.
		log.Printf("[DEBUG] The definition for %s has Go type %T", id, h.services[id])
		return nil, fmt.Errorf("Service %s must be declared with an object value in the service discovery document", id)
	}

	var grantTypes OAuthGrantTypeSet
	if rawGTs, ok := raw["grant_types"]; ok {
		if gts, ok := rawGTs.([]interface{}); ok {
			var kws []string
			for _, gtI := range gts {
				gt, ok := gtI.(string)
				if !ok {
					// We'll ignore this so that we can potentially introduce
					// other types into this array later if we need to.
					continue
				}
				kws = append(kws, gt)
			}
			grantTypes = NewOAuthGrantTypeSet(kws...)
		} else {
			return nil, fmt.Errorf("Service %s is defined with invalid grant_types property: must be an array of grant type strings", id)
		}
	} else {
		grantTypes = NewOAuthGrantTypeSet("authz_code")
	}

	ret := &OAuthClient{
		SupportedGrantTypes: grantTypes,
	}
	if clientIDStr, ok := raw["client"].(string); ok {
		ret.ID = clientIDStr
	} else {
		return nil, fmt.Errorf("Service %s definition is missing required property \"client\"", id)
	}
	if urlStr, ok := raw["authz"].(string); ok {
		u, err := h.parseURL(urlStr)
		if err != nil {
			return nil, fmt.Errorf("Failed to parse authorization URL: %v", err)
		}
		ret.AuthorizationURL = u
	} else {
		if grantTypes.RequiresAuthorizationEndpoint() {
			return nil, fmt.Errorf("Service %s definition is missing required property \"authz\"", id)
		}
	}
	if urlStr, ok := raw["token"].(string); ok {
		u, err := h.parseURL(urlStr)
		if err != nil {
			return nil, fmt.Errorf("Failed to parse token URL: %v", err)
		}
		ret.TokenURL = u
	} else {
		if grantTypes.RequiresTokenEndpoint() {
			return nil, fmt.Errorf("Service %s definition is missing required property \"token\"", id)
		}
	}
	if portsRaw, ok := raw["ports"].([]interface{}); ok {
		if len(portsRaw) != 2 {
			return nil, fmt.Errorf("Invalid \"ports\" definition for service %s: must be a two-element array", id)
		}
		invalidPortsErr := fmt.Errorf("Invalid \"ports\" definition for service %s: both ports must be whole numbers between 1024 and 65535", id)
		ports := make([]uint16, 2)
		for i := range ports {
			switch v := portsRaw[i].(type) {
			case float64:
				// JSON unmarshaling always produces float64. HCL 2 might, if
				// an invalid fractional number were given.
				if float64(uint16(v)) != v || v < 1024 {
					return nil, invalidPortsErr
				}
				ports[i] = uint16(v)
			case int:
				// Legacy HCL produces int. HCL 2 will too, if the given number
				// is a whole number.
				if v < 1024 || v > 65535 {
					return nil, invalidPortsErr
				}
				ports[i] = uint16(v)
			default:
				// Debug message because raw Go types don't belong in our UI.
				log.Printf("[DEBUG] Port value %d has Go type %T", i, portsRaw[i])
				return nil, invalidPortsErr
			}
		}
		if ports[1] < ports[0] {
			return nil, fmt.Errorf("Invalid \"ports\" definition for service %s: minimum port cannot be greater than maximum port", id)
		}
		ret.MinPort = ports[0]
		ret.MaxPort = ports[1]
	} else {
		// Default is to accept any port in the range, for a client that is
		// able to call back to any localhost port.
		ret.MinPort = 1024
		ret.MaxPort = 65535
	}

	return ret, nil
}

func (h *Host) parseURL(urlStr string) (*url.URL, error) {
	u, err := url.Parse(urlStr)
	if err != nil {
		return nil, err
	}

	// Make relative URLs absolute using our discovery URL.
	if !u.IsAbs() {
		u = h.discoURL.ResolveReference(u)
	}

	if u.Scheme != "https" && u.Scheme != "http" {
		return nil, fmt.Errorf("unsupported scheme %s", u.Scheme)
	}
	if u.User != nil {
		return nil, fmt.Errorf("embedded username/password information is not permitted")
	}

	// Fragment part is irrelevant, since we're not a browser.
	u.Fragment = ""

	return u, nil
}
// VersionConstraints returns the constraints for a given service identifier
// (which should be of the form "servicename.vN") and product.
//
// When an exact (service and version) match is found, the constraints for
// that service are returned.
//
// When the requested version is not provided but the service is, we will
// search for all alternative versions. If multiple alternative versions
// are found, the constraints of the latest available version are returned.
//
// When a service is not provided at all an error will be returned instead.
//
// When checkpoint is disabled or when a 404 is returned after making the
// HTTP call, an ErrNoVersionConstraints error will be returned.
func (h *Host) VersionConstraints(id, product string) (*Constraints, error) {
	svc, _, err := parseServiceID(id)
	if err != nil {
		return nil, err
	}

	// Return early if checkpoint is disabled.
	if disabled := os.Getenv("CHECKPOINT_DISABLE"); disabled != "" {
		return nil, &ErrNoVersionConstraints{disabled: true}
	}

	// No services supported for an empty Host.
	if h == nil || h.services == nil {
		return nil, &ErrServiceNotProvided{service: svc}
	}

	// Try to get the service URL for the version service and
	// return early if the service isn't provided by the host.
	u, err := h.ServiceURL(versionServiceID)
	if err != nil {
		return nil, err
	}

	// Check if we have an exact (service and version) match.
	if _, ok := h.services[id].(string); !ok {
		// If we don't have an exact match, we search for all matching
		// services and then use the service ID of the latest version.
		var services []string
		for serviceID := range h.services {
			if strings.HasPrefix(serviceID, svc+".") {
				services = append(services, serviceID)
			}
		}

		if len(services) == 0 {
			// No discovered services match the requested service.
			return nil, &ErrServiceNotProvided{hostname: h.hostname, service: svc}
		}

		// Set id to the latest service ID we found.
		var latest *version.Version
		for _, serviceID := range services {
			if _, ver, err := parseServiceID(serviceID); err == nil {
				if latest == nil || latest.LessThan(ver) {
					id = serviceID
					latest = ver
				}
			}
		}
	}

	// Set a default timeout of 1 sec for the versions request (in milliseconds)
	timeout := 1000
	if v, err := strconv.Atoi(os.Getenv("CHECKPOINT_TIMEOUT")); err == nil {
		timeout = v
	}

	client := &http.Client{
		Transport: h.transport,
		Timeout:   time.Duration(timeout) * time.Millisecond,
	}

	// Prepare the service URL by setting the service and product.
	v := u.Query()
	v.Set("product", product)
	u.Path += id
	u.RawQuery = v.Encode()

	// Create a new request.
	req, err := http.NewRequest("GET", u.String(), nil)
	if err != nil {
		return nil, fmt.Errorf("Failed to create version constraints request: %v", err)
	}
	req.Header.Set("Accept", "application/json")

	log.Printf("[DEBUG] Retrieve version constraints for service %s and product %s", id, product)

	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("Failed to request version constraints: %v", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode == 404 {
		return nil, &ErrNoVersionConstraints{disabled: false}
	}

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("Failed to request version constraints: %s", resp.Status)
	}

	// Parse the constraints from the response body.
	result := &Constraints{}
	if err := json.NewDecoder(resp.Body).Decode(result); err != nil {
		return nil, fmt.Errorf("Error parsing version constraints: %v", err)
	}

	return result, nil
}

func parseServiceID(id string) (string, *version.Version, error) {
	parts := strings.SplitN(id, ".", 2)
	if len(parts) != 2 {
		return "", nil, fmt.Errorf("Invalid service ID format (i.e. service.vN): %s", id)
	}

	version, err := version.NewVersion(parts[1])
	if err != nil {
		return "", nil, fmt.Errorf("Invalid service version: %v", err)
	}

	return parts[0], version, nil
}
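A sketch of the offline path through this code (illustrative, not part of the diff): ForceHostServices pre-seeds the host cache, so ServiceURL resolution and the error types above can be exercised without any network discovery. The hostname and the service map contents are assumptions.

package main

import (
	"fmt"

	svchost "github.com/hashicorp/terraform-svchost"
	"github.com/hashicorp/terraform-svchost/disco"
)

func main() {
	host, _ := svchost.ForComparison("example.com")

	d := disco.New()
	d.ForceHostServices(host, map[string]interface{}{
		"modules.v1": "/v1/modules/", // relative URL, resolved against the discovery URL
	})

	h, _ := d.Discover(host) // served from the forced cache entry, no HTTP request

	u, err := h.ServiceURL("modules.v1")
	fmt.Println(u, err) // https://example.com/v1/modules/ <nil>

	_, err = h.ServiceURL("state.v2")
	fmt.Println(err) // "host example.com does not provide a state service"
}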
30 vendor/github.com/hashicorp/terraform-svchost/disco/http_transport.go generated vendored Normal file
@@ -0,0 +1,30 @@
package disco

import (
	"net/http"

	"github.com/hashicorp/go-cleanhttp"
)

const DefaultUserAgent = "terraform-svchost/1.0"

func defaultHttpTransport() http.RoundTripper {
	t := cleanhttp.DefaultPooledTransport()
	return &userAgentRoundTripper{
		innerRt:   t,
		userAgent: DefaultUserAgent,
	}
}

type userAgentRoundTripper struct {
	innerRt   http.RoundTripper
	userAgent string
}

func (rt *userAgentRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	if _, ok := req.Header["User-Agent"]; !ok {
		req.Header.Set("User-Agent", rt.userAgent)
	}

	return rt.innerRt.RoundTrip(req)
}
178 vendor/github.com/hashicorp/terraform-svchost/disco/oauth_client.go generated vendored Normal file
@@ -0,0 +1,178 @@
package disco

import (
	"fmt"
	"net/url"
	"strings"

	"golang.org/x/oauth2"
)

// OAuthClient represents an OAuth client configuration, which is used for
// unusual services that require an entire OAuth client configuration as part
// of their service discovery, rather than just a URL.
type OAuthClient struct {
	// ID is the identifier for the client, to be used as "client_id" in
	// OAuth requests.
	ID string

	// Authorization URL is the URL of the authorization endpoint that must
	// be used for this OAuth client, as defined in the OAuth2 specifications.
	//
	// Not all grant types use the authorization endpoint, so it may be omitted
	// if none of the grant types in SupportedGrantTypes require it.
	AuthorizationURL *url.URL

	// Token URL is the URL of the token endpoint that must be used for this
	// OAuth client, as defined in the OAuth2 specifications.
	//
	// Not all grant types use the token endpoint, so it may be omitted
	// if none of the grant types in SupportedGrantTypes require it.
	TokenURL *url.URL

	// MinPort and MaxPort define a range of TCP ports on localhost that this
	// client is able to use as redirect_uri in an authorization request.
	// Terraform will select a port from this range for the temporary HTTP
	// server it creates to receive the authorization response, giving
	// a URL like http://localhost:NNN/ where NNN is the selected port number.
	//
	// Terraform will reject any port numbers in this range less than 1024,
	// to respect the common convention (enforced on some operating systems)
	// that lower port numbers are reserved for "privileged" services.
	MinPort, MaxPort uint16

	// SupportedGrantTypes is a set of the grant types that the client may
	// choose from. This includes an entry for each distinct type advertised
	// by the server, even if a particular keyword is not supported by the
	// current version of Terraform.
	SupportedGrantTypes OAuthGrantTypeSet
}

// Endpoint returns an oauth2.Endpoint value ready to be used with the oauth2
// library, representing the URLs from the receiver.
func (c *OAuthClient) Endpoint() oauth2.Endpoint {
	ep := oauth2.Endpoint{
		// We don't actually auth because we're not a server-based OAuth client,
		// so this instead just means that we include client_id as an argument
		// in our requests.
		AuthStyle: oauth2.AuthStyleInParams,
	}

	if c.AuthorizationURL != nil {
		ep.AuthURL = c.AuthorizationURL.String()
	}
	if c.TokenURL != nil {
		ep.TokenURL = c.TokenURL.String()
	}

	return ep
}

// OAuthGrantType is an enumeration of grant type strings that a host can
// advertise support for.
//
// Values of this type don't necessarily match with a known constant of the
// type, because they may represent grant type keywords defined in a later
// version of Terraform which this version doesn't yet know about.
type OAuthGrantType string

const (
	// OAuthAuthzCodeGrant represents an authorization code grant, as
	// defined in IETF RFC 6749 section 4.1.
	OAuthAuthzCodeGrant = OAuthGrantType("authz_code")

	// OAuthOwnerPasswordGrant represents a resource owner password
	// credentials grant, as defined in IETF RFC 6749 section 4.3.
	OAuthOwnerPasswordGrant = OAuthGrantType("password")
)

// UsesAuthorizationEndpoint returns true if the receiving grant type makes
// use of the authorization endpoint from the client configuration, and thus
// if the authorization endpoint ought to be required.
func (t OAuthGrantType) UsesAuthorizationEndpoint() bool {
	switch t {
	case OAuthAuthzCodeGrant:
		return true
	case OAuthOwnerPasswordGrant:
		return false
	default:
		// We'll default to false so that we don't impose any requirements
		// on any grant type keywords that might be defined for future
		// versions of Terraform.
		return false
	}
}

// UsesTokenEndpoint returns true if the receiving grant type makes
// use of the token endpoint from the client configuration, and thus
// if the token endpoint ought to be required.
func (t OAuthGrantType) UsesTokenEndpoint() bool {
	switch t {
	case OAuthAuthzCodeGrant:
		return true
	case OAuthOwnerPasswordGrant:
		return true
	default:
		// We'll default to false so that we don't impose any requirements
		// on any grant type keywords that might be defined for future
		// versions of Terraform.
		return false
	}
}

// OAuthGrantTypeSet represents a set of OAuthGrantType values.
type OAuthGrantTypeSet map[OAuthGrantType]struct{}

// NewOAuthGrantTypeSet constructs a new grant type set from the given list
// of grant type keyword strings. Any duplicates in the list are ignored.
func NewOAuthGrantTypeSet(keywords ...string) OAuthGrantTypeSet {
	ret := make(OAuthGrantTypeSet, len(keywords))
	for _, kw := range keywords {
		ret[OAuthGrantType(kw)] = struct{}{}
	}
	return ret
}

// Has returns true if the given grant type is in the receiving set.
func (s OAuthGrantTypeSet) Has(t OAuthGrantType) bool {
	_, ok := s[t]
	return ok
}

// RequiresAuthorizationEndpoint returns true if any of the grant types in
// the set are known to require an authorization endpoint.
func (s OAuthGrantTypeSet) RequiresAuthorizationEndpoint() bool {
	for t := range s {
		if t.UsesAuthorizationEndpoint() {
			return true
		}
	}
	return false
}

// RequiresTokenEndpoint returns true if any of the grant types in
// the set are known to require a token endpoint.
func (s OAuthGrantTypeSet) RequiresTokenEndpoint() bool {
	for t := range s {
		if t.UsesTokenEndpoint() {
			return true
		}
	}
	return false
}

// GoString implements fmt.GoStringer.
func (s OAuthGrantTypeSet) GoString() string {
	var buf strings.Builder
	i := 0
	buf.WriteString("disco.NewOAuthGrantTypeSet(")
	for t := range s {
		if i > 0 {
			buf.WriteString(", ")
		}
		fmt.Fprintf(&buf, "%q", string(t))
		i++
	}
	buf.WriteString(")")
	return buf.String()
}
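Outside the diff, a hedged sketch of how a caller might turn an OAuthClient into an x/oauth2 configuration using the Endpoint method above; the "login.v1" service ID and the localhost redirect URL are assumptions for illustration, not values defined by this package.

package example

import (
	"fmt"

	"github.com/hashicorp/terraform-svchost/disco"
	"golang.org/x/oauth2"
)

// buildOAuthConfig is a hypothetical helper combining ServiceOAuthClient and
// OAuthClient.Endpoint with the golang.org/x/oauth2 package.
func buildOAuthConfig(h *disco.Host) (*oauth2.Config, error) {
	client, err := h.ServiceOAuthClient("login.v1") // assumed service ID
	if err != nil {
		return nil, err
	}
	return &oauth2.Config{
		ClientID: client.ID,
		Endpoint: client.Endpoint(),
		// The callback port must fall within [client.MinPort, client.MaxPort].
		RedirectURL: fmt.Sprintf("http://localhost:%d/", client.MinPort),
	}, nil
}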
@@ -0,0 +1,12 @@
module github.com/hashicorp/terraform-svchost

go 1.12

require (
	github.com/google/go-cmp v0.3.1
	github.com/hashicorp/go-cleanhttp v0.5.1
	github.com/hashicorp/go-version v1.2.0
	github.com/zclconf/go-cty v1.1.0
	golang.org/x/net v0.0.0-20191009170851-d66e71096ffb
	golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
)
@@ -0,0 +1,36 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/zclconf/go-cty v1.1.0 h1:uJwc9HiBOCpoKIObTQaLR+tsEXx1HBHnOsOOpcdhZgw=
github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20191009170851-d66e71096ffb h1:TR699M2v0qoKTOHxeLgp6zPqaQNs74f01a/ob9W0qko=
golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -0,0 +1,69 @@
package svchost

import (
	"strings"
)

// A labelIter allows iterating over domain name labels.
//
// This type is copied from golang.org/x/net/idna, where it is used
// to segment hostnames into their separate labels for analysis. We use
// it for the same purpose here, in ForComparison.
type labelIter struct {
	orig     string
	slice    []string
	curStart int
	curEnd   int
	i        int
}

func (l *labelIter) reset() {
	l.curStart = 0
	l.curEnd = 0
	l.i = 0
}

func (l *labelIter) done() bool {
	return l.curStart >= len(l.orig)
}

func (l *labelIter) result() string {
	if l.slice != nil {
		return strings.Join(l.slice, ".")
	}
	return l.orig
}

func (l *labelIter) label() string {
	if l.slice != nil {
		return l.slice[l.i]
	}
	p := strings.IndexByte(l.orig[l.curStart:], '.')
	l.curEnd = l.curStart + p
	if p == -1 {
		l.curEnd = len(l.orig)
	}
	return l.orig[l.curStart:l.curEnd]
}

// next sets the value to the next label. It skips the last label if it is empty.
func (l *labelIter) next() {
	l.i++
	if l.slice != nil {
		if l.i >= len(l.slice) || l.i == len(l.slice)-1 && l.slice[l.i] == "" {
			l.curStart = len(l.orig)
		}
	} else {
		l.curStart = l.curEnd + 1
		if l.curStart == len(l.orig)-1 && l.orig[l.curStart] == '.' {
			l.curStart = len(l.orig)
		}
	}
}

func (l *labelIter) set(s string) {
	if l.slice == nil {
		l.slice = strings.Split(l.orig, ".")
	}
	l.slice[l.i] = s
}
@@ -0,0 +1,207 @@
// Package svchost deals with the representations of the so-called "friendly
// hostnames" that we use to represent systems that provide Terraform-native
// remote services, such as module registry, remote operations, etc.
//
// Friendly hostnames are specified such that, as much as possible, they
// are consistent with how web browsers think of hostnames, so that users
// can bring their intuitions about how hostnames behave when they access
// a Terraform Enterprise instance's web UI (or indeed any other website)
// and have this behave in a similar way.
package svchost

import (
	"errors"
	"fmt"
	"strconv"
	"strings"

	"golang.org/x/net/idna"
)

// Hostname is a specialized name for string that indicates that the string
// has been converted to (or was already in) the storage and comparison form.
//
// Hostname values are not suitable for display in the user-interface. Use
// the ForDisplay method to obtain a form suitable for display in the UI.
//
// Unlike user-supplied hostnames, strings of type Hostname (assuming they
// were constructed by a function within this package) can be compared for
// equality using the standard Go == operator.
type Hostname string

// acePrefix is the ASCII Compatible Encoding prefix, used to indicate that
// a domain name label is in "punycode" form.
const acePrefix = "xn--"

// displayProfile is a very liberal idna profile that we use to do
// normalization for display without imposing validation rules.
var displayProfile = idna.New(
	idna.MapForLookup(),
	idna.Transitional(true),
)

// ForDisplay takes a user-specified hostname and returns a normalized form of
// it suitable for display in the UI.
//
// If the input is so invalid that no normalization can be performed then
// this will return the input, assuming that the caller still wants to
// display _something_. This function is, however, more tolerant than the
// other functions in this package and will make a best effort to prepare
// _any_ given hostname for display.
//
// For validation, use either IsValid (for explicit validation) or
// ForComparison (which implicitly validates, returning an error if invalid).
func ForDisplay(given string) string {
	var portPortion string
	if colonPos := strings.Index(given, ":"); colonPos != -1 {
		given, portPortion = given[:colonPos], given[colonPos:]
	}
	portPortion, _ = normalizePortPortion(portPortion)

	ascii, err := displayProfile.ToASCII(given)
	if err != nil {
		return given + portPortion
	}
	display, err := displayProfile.ToUnicode(ascii)
	if err != nil {
		return given + portPortion
	}
	return display + portPortion
}

// IsValid returns true if the given user-specified hostname is a valid
// service hostname.
//
// Validity is determined by complying with the RFC 5891 requirements for
// names that are valid for domain lookup (section 5), with the additional
// requirement that user-supplied forms must not _already_ contain
// Punycode segments.
func IsValid(given string) bool {
	_, err := ForComparison(given)
	return err == nil
}

// ForComparison takes a user-specified hostname and returns a normalized
// form of it suitable for storage and comparison. The result is not suitable
// for display to end-users because it uses Punycode to represent non-ASCII
// characters, and this form is unreadable for non-ASCII-speaking humans.
//
// The result is typed as Hostname -- a specialized name for string -- so that
// other APIs can make it clear within the type system whether they expect a
// user-specified or display-form hostname or a value already normalized for
// comparison.
//
// The returned Hostname is not valid if the returned error is non-nil.
func ForComparison(given string) (Hostname, error) {
	var portPortion string
	if colonPos := strings.Index(given, ":"); colonPos != -1 {
		given, portPortion = given[:colonPos], given[colonPos:]
	}

	var err error
	portPortion, err = normalizePortPortion(portPortion)
	if err != nil {
		return Hostname(""), err
	}

	if given == "" {
		return Hostname(""), fmt.Errorf("empty string is not a valid hostname")
	}

	// First we'll apply our additional constraint that Punycode must not
	// be given directly by the user. This is not an IDN specification
	// requirement, but we prohibit it to force users to use human-readable
	// hostname forms within Terraform configuration.
	labels := labelIter{orig: given}
	for ; !labels.done(); labels.next() {
		label := labels.label()
		if label == "" {
			return Hostname(""), fmt.Errorf(
				"hostname contains empty label (two consecutive periods)",
			)
		}
		if strings.HasPrefix(label, acePrefix) {
			return Hostname(""), fmt.Errorf(
				"hostname label %q specified in punycode format; service hostnames must be given in unicode",
				label,
			)
		}
	}

	result, err := idna.Lookup.ToASCII(given)
	if err != nil {
		return Hostname(""), err
	}
	return Hostname(result + portPortion), nil
}
// ForDisplay returns a version of the receiver that is appropriate for display
|
||||||
|
// in the UI. This includes converting any punycode labels to their
|
||||||
|
// corresponding Unicode characters.
|
||||||
|
//
|
||||||
|
// A round-trip through ForComparison and this ForDisplay method does not
|
||||||
|
// guarantee the same result as calling this package's top-level ForDisplay
|
||||||
|
// function, since a round-trip through the Hostname type implies stricter
|
||||||
|
// handling than we do when doing basic display-only processing.
|
||||||
|
func (h Hostname) ForDisplay() string {
|
||||||
|
given := string(h)
|
||||||
|
var portPortion string
|
||||||
|
if colonPos := strings.Index(given, ":"); colonPos != -1 {
|
||||||
|
given, portPortion = given[:colonPos], given[colonPos:]
|
||||||
|
}
|
||||||
|
// We don't normalize the port portion here because we assume it's
|
||||||
|
// already been normalized on the way in.
|
||||||
|
|
||||||
|
result, err := idna.Lookup.ToUnicode(given)
|
||||||
|
if err != nil {
|
||||||
|
// Should never happen, since type Hostname indicates that a string
|
||||||
|
// passed through our validation rules.
|
||||||
|
panic(fmt.Errorf("ForDisplay called on invalid Hostname: %s", err))
|
||||||
|
}
|
||||||
|
return result + portPortion
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h Hostname) String() string {
|
||||||
|
return string(h)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h Hostname) GoString() string {
|
||||||
|
return fmt.Sprintf("svchost.Hostname(%q)", string(h))
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizePortPortion attempts to normalize the "port portion" of a hostname,
|
||||||
|
// which begins with the first colon in the hostname and should be followed
|
||||||
|
// by a string of decimal digits.
|
||||||
|
//
|
||||||
|
// If the port portion is valid, a normalized version of it is returned along
|
||||||
|
// with a nil error.
|
||||||
|
//
|
||||||
|
// If the port portion is invalid, the input string is returned verbatim along
|
||||||
|
// with a non-nil error.
|
||||||
|
//
|
||||||
|
// An empty string is a valid port portion representing the absence of a port.
|
||||||
|
// If non-empty, the first character must be a colon.
|
||||||
|
func normalizePortPortion(s string) (string, error) {
|
||||||
|
if s == "" {
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if s[0] != ':' {
|
||||||
|
// should never happen, since caller tends to guarantee the presence
|
||||||
|
// of a colon due to how it's extracted from the string.
|
||||||
|
return s, errors.New("port portion is missing its initial colon")
|
||||||
|
}
|
||||||
|
|
||||||
|
numStr := s[1:]
|
||||||
|
num, err := strconv.Atoi(numStr)
|
||||||
|
if err != nil {
|
||||||
|
return s, errors.New("port portion contains non-digit characters")
|
||||||
|
}
|
||||||
|
if num == 443 {
|
||||||
|
return "", nil // ":443" is the default
|
||||||
|
}
|
||||||
|
if num > 65535 {
|
||||||
|
return s, errors.New("port number is greater than 65535")
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(":%d", num), nil
|
||||||
|
}
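A minimal usage sketch (not part of the vendored file) tying the pieces above together: ForComparison applies the punycode/IDNA validation and normalizePortPortion (so the default ":443" is dropped), and the resulting Hostname can be compared with == or rendered with ForDisplay. The import path github.com/hashicorp/terraform-svchost and the hostname value are assumptions made only for illustration.

// Sketch: assumes the vendored svchost package above.
package main

import (
	"fmt"

	svchost "github.com/hashicorp/terraform-svchost"
)

func main() {
	// Mixed case is normalized and the default HTTPS port ":443" is stripped;
	// punycode ("xn--") labels would be rejected with an error.
	host, err := svchost.ForComparison("Registry.Terraform.io:443")
	if err != nil {
		fmt.Println("invalid hostname:", err)
		return
	}
	fmt.Println(host == "registry.terraform.io") // true: Hostname supports ==
	fmt.Println(host.ForDisplay())               // registry.terraform.io
}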
|
|
@ -0,0 +1,93 @@
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// Generate the table of OID values
|
||||||
|
// Run with 'go run gen.go'.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
_ "github.com/lib/pq"
|
||||||
|
)
|
||||||
|
|
||||||
|
// OID represents a postgres Object Identifier Type.
|
||||||
|
type OID struct {
|
||||||
|
ID int
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns an upper case version of the oid type.
|
||||||
|
func (o OID) Name() string {
|
||||||
|
return strings.ToUpper(o.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
datname := os.Getenv("PGDATABASE")
|
||||||
|
sslmode := os.Getenv("PGSSLMODE")
|
||||||
|
|
||||||
|
if datname == "" {
|
||||||
|
os.Setenv("PGDATABASE", "pqgotest")
|
||||||
|
}
|
||||||
|
|
||||||
|
if sslmode == "" {
|
||||||
|
os.Setenv("PGSSLMODE", "disable")
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := sql.Open("postgres", "")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
rows, err := db.Query(`
|
||||||
|
SELECT typname, oid
|
||||||
|
FROM pg_type WHERE oid < 10000
|
||||||
|
ORDER BY oid;
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
oids := make([]*OID, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var oid OID
|
||||||
|
if err = rows.Scan(&oid.Type, &oid.ID); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
oids = append(oids, &oid)
|
||||||
|
}
|
||||||
|
if err = rows.Err(); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
cmd := exec.Command("gofmt")
|
||||||
|
cmd.Stderr = os.Stderr
|
||||||
|
w, err := cmd.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
f, err := os.Create("types.go")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
cmd.Stdout = f
|
||||||
|
err = cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "// Code generated by gen.go. DO NOT EDIT.")
|
||||||
|
fmt.Fprintln(w, "\npackage oid")
|
||||||
|
fmt.Fprintln(w, "const (")
|
||||||
|
for _, oid := range oids {
|
||||||
|
fmt.Fprintf(w, "T_%s Oid = %d\n", oid.Type, oid.ID)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, ")")
|
||||||
|
fmt.Fprintln(w, "var TypeName = map[Oid]string{")
|
||||||
|
for _, oid := range oids {
|
||||||
|
fmt.Fprintf(w, "T_%s: \"%s\",\n", oid.Type, oid.Name())
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "}")
|
||||||
|
w.Close()
|
||||||
|
cmd.Wait()
|
||||||
|
}
|
|
@ -0,0 +1,508 @@
|
||||||
|
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||||
|
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package codec
|
||||||
|
|
||||||
|
import "reflect"
|
||||||
|
|
||||||
|
/*
|
||||||
|
|
||||||
|
A strict Non-validating namespace-aware XML 1.0 parser and (en|de)coder.
|
||||||
|
|
||||||
|
We are attempting this due to perceived issues with encoding/xml:
|
||||||
|
- Complicated. It tried to do too much, and is not as simple to use as json.
|
||||||
|
- Due to over-engineering, reflection is over-used AND performance suffers:
|
||||||
|
java is 6X faster: http://fabsk.eu/blog/category/informatique/dev/golang/
|
||||||
|
even PYTHON performs better: http://outgoing.typepad.com/outgoing/2014/07/exploring-golang.html
|
||||||
|
|
||||||
|
codec framework will offer the following benefits
|
||||||
|
- VASTLY improved performance (when using reflection-mode or codecgen)
|
||||||
|
- simplicity and consistency: with the rest of the supported formats
|
||||||
|
- all other benefits of codec framework (streaming, codegeneration, etc)
|
||||||
|
|
||||||
|
codec is not a drop-in replacement for encoding/xml.
|
||||||
|
It is a replacement, based on the simplicity and performance of codec.
|
||||||
|
Look at it like JAXB for Go.
|
||||||
|
|
||||||
|
Challenges:
|
||||||
|
- Need to output XML preamble, with all namespaces at the right location in the output.
|
||||||
|
- Each "end" block is dynamic, so we need to maintain a context-aware stack
|
||||||
|
- How to decide when to use an attribute VS an element
|
||||||
|
- How to handle chardata, attr, comment EXPLICITLY.
|
||||||
|
- Should it output fragments?
|
||||||
|
e.g. encoding a bool should just output true OR false, which is not well-formed XML.
|
||||||
|
|
||||||
|
Extend the struct tag. See representative example:
|
||||||
|
type X struct {
|
||||||
|
ID uint8 `codec:"http://ugorji.net/x-namespace xid id,omitempty,toarray,attr,cdata"`
|
||||||
|
// format: [namespace-uri ][namespace-prefix ]local-name, ...
|
||||||
|
}
|
||||||
|
|
||||||
|
Based on this, we encode
|
||||||
|
- fields as elements, BUT
|
||||||
|
encode as attributes if struct tag contains ",attr" and is a scalar (bool, number or string)
|
||||||
|
- text as entity-escaped text, BUT encode as CDATA if struct tag contains ",cdata".
|
||||||
|
|
||||||
|
To handle namespaces:
|
||||||
|
- XMLHandle is denoted as being namespace-aware.
|
||||||
|
Consequently, we WILL use the ns:name pair to encode and decode if defined, else use the plain name.
|
||||||
|
- *Encoder and *Decoder know whether the Handle "prefers" namespaces.
|
||||||
|
- add *Encoder.getEncName(*structFieldInfo).
|
||||||
|
No one calls *structFieldInfo.indexForEncName directly anymore
|
||||||
|
- OR better yet: indexForEncName is namespace-aware, and helper.go is all namespace-aware
|
||||||
|
indexForEncName takes a parameter of the form namespace:local-name OR local-name
|
||||||
|
- add *Decoder.getStructFieldInfo(encName string) // encName here is either like abc, or h1:nsabc
|
||||||
|
by being a method on *Decoder, or maybe a method on the Handle itself.
|
||||||
|
No one accesses .encName anymore
|
||||||
|
- let encode.go and decode.go use these (for consistency)
|
||||||
|
- only problem exists for gen.go, where we create a big switch on encName.
|
||||||
|
Now, we also have to add a switch on strings.endsWith(kName, encNsName)
|
||||||
|
- gen.go will need to have many more methods, and then double-on the 2 switch loops like:
|
||||||
|
switch k {
|
||||||
|
case "abc" : x.abc()
|
||||||
|
case "def" : x.def()
|
||||||
|
default {
|
||||||
|
switch {
|
||||||
|
case !nsAware: panic(...)
|
||||||
|
case strings.endsWith(":abc"): x.abc()
|
||||||
|
case strings.endsWith(":def"): x.def()
|
||||||
|
default: panic(...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
The structure below accommodates this:
|
||||||
|
|
||||||
|
type typeInfo struct {
|
||||||
|
sfi []*structFieldInfo // sorted by encName
|
||||||
|
sfins // sorted by namespace
|
||||||
|
sfia // sorted, to have those with attributes at the top. Needed to write XML appropriately.
|
||||||
|
sfip // unsorted
|
||||||
|
}
|
||||||
|
type structFieldInfo struct {
|
||||||
|
encName
|
||||||
|
nsEncName
|
||||||
|
ns string
|
||||||
|
attr bool
|
||||||
|
cdata bool
|
||||||
|
}
|
||||||
|
|
||||||
|
indexForEncName is now an internal helper function that takes a sorted array
|
||||||
|
(one of ti.sfins or ti.sfi). It is only used by *Encoder.getStructFieldInfo(...)
|
||||||
|
|
||||||
|
There will be a separate parser from the builder.
|
||||||
|
The parser will have a next() xmlToken method. It has lookahead support,
|
||||||
|
so you can pop multiple tokens, make a determination, and push them back in the order popped.
|
||||||
|
This will be needed to determine whether we are "nakedly" decoding a container or not.
|
||||||
|
The stack will be implemented using a slice and push/pop happens at the [0] element.
|
||||||
|
|
||||||
|
xmlToken has fields:
|
||||||
|
- type uint8: 0 | ElementStart | ElementEnd | AttrKey | AttrVal | Text
|
||||||
|
- value string
|
||||||
|
- ns string
|
||||||
|
|
||||||
|
SEE: http://www.xml.com/pub/a/98/10/guide0.html?page=3#ENTDECL
|
||||||
|
|
||||||
|
The following are skipped when parsing:
|
||||||
|
- External Entities (from external file)
|
||||||
|
- Notation Declaration e.g. <!NOTATION GIF87A SYSTEM "GIF">
|
||||||
|
- Entity Declarations & References
|
||||||
|
- XML Declaration (assume UTF-8)
|
||||||
|
- XML Directive i.e. <! ... >
|
||||||
|
- Other Declarations: Notation, etc.
|
||||||
|
- Comment
|
||||||
|
- Processing Instruction
|
||||||
|
- schema / DTD for validation:
|
||||||
|
We are not a VALIDATING parser. Validation is done elsewhere.
|
||||||
|
However, some parts of the DTD internal subset are used (SEE BELOW).
|
||||||
|
For Attribute List Declarations e.g.
|
||||||
|
<!ATTLIST foo:oldjoke name ID #REQUIRED label CDATA #IMPLIED status ( funny | notfunny ) 'funny' >
|
||||||
|
We considered using the ATTLIST to get "default" value, but not to validate the contents. (VETOED)
|
||||||
|
|
||||||
|
The following XML features are supported
|
||||||
|
- Namespace
|
||||||
|
- Element
|
||||||
|
- Attribute
|
||||||
|
- cdata
|
||||||
|
- Unicode escape
|
||||||
|
|
||||||
|
The following DTD (when as an internal sub-set) features are supported:
|
||||||
|
- Internal Entities e.g.
|
||||||
|
<!ELEMENT burns "ugorji is cool" > AND entities for the set: [<>&"']
|
||||||
|
- Parameter entities e.g.
|
||||||
|
<!ENTITY % personcontent "ugorji is cool"> <!ELEMENT burns (%personcontent;)*>
|
||||||
|
|
||||||
|
At decode time, a structure containing the following is kept
|
||||||
|
- namespace mapping
|
||||||
|
- default attribute values
|
||||||
|
- all internal entities (<>&"' and others written in the document)
|
||||||
|
|
||||||
|
When decode starts, it parses XML namespace declarations and creates a map in the
|
||||||
|
xmlDecDriver. While parsing, that map continuously gets updated.
|
||||||
|
The only problem happens when a namespace declaration happens on the node that it defines.
|
||||||
|
e.g. <hn:name xmlns:hn="http://www.ugorji.net" >
|
||||||
|
To handle this, each Element must be fully parsed at a time,
|
||||||
|
even if it amounts to multiple tokens which are returned one at a time on request.
|
||||||
|
|
||||||
|
xmlns is a special attribute name.
|
||||||
|
- It is used to define namespaces, including the default
|
||||||
|
- It is never returned as an AttrKey or AttrVal.
|
||||||
|
*We may decide later to allow user to use it e.g. you want to parse the xmlns mappings into a field.*
|
||||||
|
|
||||||
|
Number, bool, null, mapKey, etc can all be decoded from any xmlToken.
|
||||||
|
This accommodates map[int]string for example.
|
||||||
|
|
||||||
|
It should be possible to create a schema from the types,
|
||||||
|
or vice versa (generate types from schema with appropriate tags).
|
||||||
|
This is however out-of-scope from this parsing project.
|
||||||
|
|
||||||
|
We should write all namespace information at the first point that it is referenced in the tree,
|
||||||
|
and use the mapping for all child nodes and attributes. This means that state is maintained
|
||||||
|
at a point in the tree. This also means that calls to Decode or MustDecode will reset some state.
|
||||||
|
|
||||||
|
When decoding, it is important to keep track of entity references and default attribute values.
|
||||||
|
It seems these can only be stored in the DTD components. We should honor them when decoding.
|
||||||
|
|
||||||
|
Configuration for XMLHandle will look like this:
|
||||||
|
|
||||||
|
XMLHandle
|
||||||
|
DefaultNS string
|
||||||
|
// Encoding:
|
||||||
|
NS map[string]string // ns URI to key, used for encoding
|
||||||
|
// Decoding: in case ENTITY declared in external schema or dtd, store info needed here
|
||||||
|
Entities map[string]string // map of entity rep to character
|
||||||
|
|
||||||
|
|
||||||
|
During encode, if a namespace mapping is not defined for a namespace found on a struct,
|
||||||
|
then we create a mapping for it using nsN (where N is 1..1000000, and doesn't conflict
|
||||||
|
with any other namespace mapping).
|
||||||
|
|
||||||
|
Note that different fields in a struct can have different namespaces.
|
||||||
|
However, all fields will default to the namespace on the _struct field (if defined).
|
||||||
|
|
||||||
|
An XML document is a name, a map of attributes and a list of children.
|
||||||
|
Consequently, we cannot "DecodeNaked" into a map[string]interface{} (for example).
|
||||||
|
We have to "DecodeNaked" into something that resembles XML data.
|
||||||
|
|
||||||
|
To support DecodeNaked (decode into nil interface{}), we have to define some "supporting" types:
|
||||||
|
type Name struct { // Preferred. Less allocations due to conversions.
|
||||||
|
Local string
|
||||||
|
Space string
|
||||||
|
}
|
||||||
|
type Element struct {
|
||||||
|
Name Name
|
||||||
|
Attrs map[Name]string
|
||||||
|
Children []interface{} // each child is either *Element or string
|
||||||
|
}
|
||||||
|
Only two "supporting" types are exposed for XML: Name and Element.
|
||||||
|
|
||||||
|
// ------------------
|
||||||
|
|
||||||
|
We considered 'type Name string' where Name is like "Space Local" (space-separated).
|
||||||
|
We decided against it, because each creation of a name would lead to
|
||||||
|
double allocation (first convert []byte to string, then concatenate them into a string).
|
||||||
|
The benefit is that it is faster to read Attrs from a map. But given that Element is a value
|
||||||
|
object, we want to eschew methods and have public exposed variables.
|
||||||
|
|
||||||
|
We also considered the following, where xml types were not value objects, and we used
|
||||||
|
intelligent accessor methods to extract information and for performance.
|
||||||
|
*** WE DECIDED AGAINST THIS. ***
|
||||||
|
type Attr struct {
|
||||||
|
Name Name
|
||||||
|
Value string
|
||||||
|
}
|
||||||
|
// Element is a ValueObject: There are no accessor methods.
|
||||||
|
// Make element self-contained.
|
||||||
|
type Element struct {
|
||||||
|
Name Name
|
||||||
|
attrsMap map[string]string // where key is "Space Local"
|
||||||
|
attrs []Attr
|
||||||
|
childrenT []string
|
||||||
|
childrenE []Element
|
||||||
|
childrenI []int // each child is a index into T or E.
|
||||||
|
}
|
||||||
|
func (x *Element) child(i) interface{} // returns string or *Element
|
||||||
|
|
||||||
|
// ------------------
|
||||||
|
|
||||||
|
Per XML spec and our default handling, white space is always treated as
|
||||||
|
insignificant between elements, except in a text node. The xml:space='preserve'
|
||||||
|
attribute is ignored.
|
||||||
|
|
||||||
|
**Note: there is no xml: namespace. The xml: attributes were defined before namespaces.**
|
||||||
|
**So treat them as just "directives" that should be interpreted to mean something**.
|
||||||
|
|
||||||
|
On encoding, we support indenting aka prettifying markup in the same way we support it for json.
|
||||||
|
|
||||||
|
A document or element can only be encoded/decoded from/to a struct. In this mode:
|
||||||
|
- struct name maps to element name (or tag-info from _struct field)
|
||||||
|
- fields are mapped to child elements or attributes
|
||||||
|
|
||||||
|
A map is either encoded as attributes on current element, or as a set of child elements.
|
||||||
|
Maps are encoded as attributes iff their keys and values are primitives (number, bool, string).
|
||||||
|
|
||||||
|
A list is encoded as a set of child elements.
|
||||||
|
|
||||||
|
Primitives (number, bool, string) are encoded as an element, attribute or text
|
||||||
|
depending on the context.
|
||||||
|
|
||||||
|
Extensions must encode themselves as a text string.
|
||||||
|
|
||||||
|
Encoding is tough, specifically when encoding mappings, because we need to encode
|
||||||
|
as either attribute or element. To do this, we need to default to encoding as attributes,
|
||||||
|
and then let Encoder inform the Handle when to start encoding as nodes.
|
||||||
|
i.e. Encoder does something like:
|
||||||
|
|
||||||
|
h.EncodeMapStart()
|
||||||
|
h.Encode(), h.Encode(), ...
|
||||||
|
h.EncodeMapNotAttrSignal() // this is not a bool, because it's a signal
|
||||||
|
h.Encode(), h.Encode(), ...
|
||||||
|
h.EncodeEnd()
|
||||||
|
|
||||||
|
Only XMLHandle understands this, and will set itself to start encoding as elements.
|
||||||
|
|
||||||
|
This support extends to maps. For example, if a struct field is a map, and it has
|
||||||
|
the struct tag signifying it should be attr, then all its fields are encoded as attributes.
|
||||||
|
e.g.
|
||||||
|
|
||||||
|
type X struct {
|
||||||
|
M map[string]int `codec:"m,attr"` // encode keys as attributes named
|
||||||
|
}
|
||||||
|
|
||||||
|
Question:
|
||||||
|
- if encoding a map, what if map keys have spaces in them???
|
||||||
|
Then they cannot be attributes or child elements. Error.
|
||||||
|
|
||||||
|
Options to consider adding later:
|
||||||
|
- For attribute values, normalize by trimming beginning and ending white space,
|
||||||
|
and converting every white space sequence to a single space.
|
||||||
|
- ATTLIST restrictions are enforced.
|
||||||
|
e.g. default value of xml:space, skipping xml:XYZ style attributes, etc.
|
||||||
|
- Consider supporting NON-STRICT mode (e.g. to handle HTML parsing).
|
||||||
|
Some elements e.g. br, hr, etc need not close and should be auto-closed
|
||||||
|
... (see http://www.w3.org/TR/html4/loose.dtd)
|
||||||
|
An expansive set of entities are pre-defined.
|
||||||
|
- Have easy way to create a HTML parser:
|
||||||
|
add a HTML() method to XMLHandle, that will set Strict=false, specify AutoClose,
|
||||||
|
and add HTML Entities to the list.
|
||||||
|
- Support validating element/attribute XMLName before writing it.
|
||||||
|
Keep this behind a flag, which is set to false by default (for performance).
|
||||||
|
type XMLHandle struct {
|
||||||
|
CheckName bool
|
||||||
|
}
|
||||||
|
|
||||||
|
Misc:
|
||||||
|
|
||||||
|
ROADMAP (1 weeks):
|
||||||
|
- build encoder (1 day)
|
||||||
|
- build decoder (based off xmlParser) (1 day)
|
||||||
|
- implement xmlParser (2 days).
|
||||||
|
Look at encoding/xml for inspiration.
|
||||||
|
- integrate and TEST (1 days)
|
||||||
|
- write article and post it (1 day)
|
||||||
|
|
||||||
|
// ---------- MORE NOTES FROM 2017-11-30 ------------
|
||||||
|
|
||||||
|
when parsing
|
||||||
|
- parse the attributes first
|
||||||
|
- then parse the nodes
|
||||||
|
|
||||||
|
basically:
|
||||||
|
- if encoding a field: we use the field name for the wrapper
|
||||||
|
- if encoding a non-field, then just use the element type name
|
||||||
|
|
||||||
|
map[string]string ==> <map><key>abc</key><value>val</value></map>... or
|
||||||
|
<map key="abc">val</map>... OR
|
||||||
|
<key1>val1</key1><key2>val2</key2>... <- PREFERRED
|
||||||
|
[]string ==> <string>v1</string><string>v2</string>...
|
||||||
|
string v1 ==> <string>v1</string>
|
||||||
|
bool true ==> <bool>true</bool>
|
||||||
|
float 1.0 ==> <float>1.0</float>
|
||||||
|
...
|
||||||
|
|
||||||
|
F1 map[string]string ==> <F1><key>abc</key><value>val</value></F1>... OR
|
||||||
|
<F1 key="abc">val</F1>... OR
|
||||||
|
<F1><abc>val</abc>...</F1> <- PREFERRED
|
||||||
|
F2 []string ==> <F2>v1</F2><F2>v2</F2>...
|
||||||
|
F3 bool ==> <F3>true</F3>
|
||||||
|
...
|
||||||
|
|
||||||
|
- a scalar is encoded as:
|
||||||
|
(value) of type T ==> <T><value/></T>
|
||||||
|
(value) of field F ==> <F><value/></F>
|
||||||
|
- A kv-pair is encoded as:
|
||||||
|
(key,value) ==> <map><key><value/></key></map> OR <map key="value">
|
||||||
|
(key,value) of field F ==> <F><key><value/></key></F> OR <F key="value">
|
||||||
|
- A map or struct is just a list of kv-pairs
|
||||||
|
- A list is encoded as sequences of same node e.g.
|
||||||
|
<F1 key1="value11">
|
||||||
|
<F1 key2="value12">
|
||||||
|
<F2>value21</F2>
|
||||||
|
<F2>value22</F2>
|
||||||
|
- we may have to singularize the field name, when entering into xml,
|
||||||
|
and pluralize them when encoding.
|
||||||
|
- bi-directional encode->decode->encode is not a MUST.
|
||||||
|
even encoding/xml cannot decode correctly what was encoded:
|
||||||
|
|
||||||
|
see https://play.golang.org/p/224V_nyhMS
|
||||||
|
func main() {
|
||||||
|
fmt.Println("Hello, playground")
|
||||||
|
v := []interface{}{"hello", 1, true, nil, time.Now()}
|
||||||
|
s, err := xml.Marshal(v)
|
||||||
|
fmt.Printf("err: %v, \ns: %s\n", err, s)
|
||||||
|
var v2 []interface{}
|
||||||
|
err = xml.Unmarshal(s, &v2)
|
||||||
|
fmt.Printf("err: %v, \nv2: %v\n", err, v2)
|
||||||
|
type T struct {
|
||||||
|
V []interface{}
|
||||||
|
}
|
||||||
|
v3 := T{V: v}
|
||||||
|
s, err = xml.Marshal(v3)
|
||||||
|
fmt.Printf("err: %v, \ns: %s\n", err, s)
|
||||||
|
var v4 T
|
||||||
|
err = xml.Unmarshal(s, &v4)
|
||||||
|
fmt.Printf("err: %v, \nv4: %v\n", err, v4)
|
||||||
|
}
|
||||||
|
Output:
|
||||||
|
err: <nil>,
|
||||||
|
s: <string>hello</string><int>1</int><bool>true</bool><Time>2009-11-10T23:00:00Z</Time>
|
||||||
|
err: <nil>,
|
||||||
|
v2: [<nil>]
|
||||||
|
err: <nil>,
|
||||||
|
s: <T><V>hello</V><V>1</V><V>true</V><V>2009-11-10T23:00:00Z</V></T>
|
||||||
|
err: <nil>,
|
||||||
|
v4: {[<nil> <nil> <nil> <nil>]}
|
||||||
|
-
|
||||||
|
*/
|
||||||
|
|
||||||
|
// ----------- PARSER -------------------
|
||||||
|
|
||||||
|
type xmlTokenType uint8
|
||||||
|
|
||||||
|
const (
|
||||||
|
_ xmlTokenType = iota << 1
|
||||||
|
xmlTokenElemStart
|
||||||
|
xmlTokenElemEnd
|
||||||
|
xmlTokenAttrKey
|
||||||
|
xmlTokenAttrVal
|
||||||
|
xmlTokenText
|
||||||
|
)
|
||||||
|
|
||||||
|
type xmlToken struct {
|
||||||
|
Type xmlTokenType
|
||||||
|
Value string
|
||||||
|
Namespace string // blank for AttrVal and Text
|
||||||
|
}
|
||||||
|
|
||||||
|
type xmlParser struct {
|
||||||
|
r decReader
|
||||||
|
toks []xmlToken // list of tokens.
|
||||||
|
ptr int // ptr into the toks slice
|
||||||
|
done bool // nothing else to parse. r now returns EOF.
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *xmlParser) next() (t *xmlToken) {
|
||||||
|
// once x.done, or x.ptr == len(x.toks) == 0, then return nil (to signify finish)
|
||||||
|
if !x.done && len(x.toks) == 0 {
|
||||||
|
x.nextTag()
|
||||||
|
}
|
||||||
|
// parses one element at a time (into possibly many tokens)
|
||||||
|
if x.ptr < len(x.toks) {
|
||||||
|
t = &(x.toks[x.ptr])
|
||||||
|
x.ptr++
|
||||||
|
if x.ptr == len(x.toks) {
|
||||||
|
x.ptr = 0
|
||||||
|
x.toks = x.toks[:0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// nextTag parses the next element and fills up toks.
|
||||||
|
// It sets the done flag once EOF is reached.
|
||||||
|
func (x *xmlParser) nextTag() {
|
||||||
|
// TODO: implement.
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------- ENCODER -------------------
|
||||||
|
|
||||||
|
type xmlEncDriver struct {
|
||||||
|
e *Encoder
|
||||||
|
w encWriter
|
||||||
|
h *XMLHandle
|
||||||
|
b [64]byte // scratch
|
||||||
|
bs []byte // scratch
|
||||||
|
// s jsonStack
|
||||||
|
noBuiltInTypes
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------- DECODER -------------------
|
||||||
|
|
||||||
|
type xmlDecDriver struct {
|
||||||
|
d *Decoder
|
||||||
|
h *XMLHandle
|
||||||
|
r decReader // *bytesDecReader decReader
|
||||||
|
ct valueType // container type. one of unset, array or map.
|
||||||
|
bstr [8]byte // scratch used for string \UXXX parsing
|
||||||
|
b [64]byte // scratch
|
||||||
|
|
||||||
|
// wsSkipped bool // whitespace skipped
|
||||||
|
|
||||||
|
// s jsonStack
|
||||||
|
|
||||||
|
noBuiltInTypes
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeNaked will decode into an XMLNode
|
||||||
|
|
||||||
|
// XMLName is a value object representing a namespace-aware NAME
|
||||||
|
type XMLName struct {
|
||||||
|
Local string
|
||||||
|
Space string
|
||||||
|
}
|
||||||
|
|
||||||
|
// XMLNode represents a "union" of the different types of XML Nodes.
|
||||||
|
// Only one of fields (Text or *Element) is set.
|
||||||
|
type XMLNode struct {
|
||||||
|
Element *Element
|
||||||
|
Text string
|
||||||
|
}
|
||||||
|
|
||||||
|
// XMLElement is a value object representing a fully-parsed XML element.
|
||||||
|
type XMLElement struct {
|
||||||
|
Name XMLName
|
||||||
|
Attrs map[XMLName]string
|
||||||
|
// Children is a list of child nodes, each being a *XMLElement or string
|
||||||
|
Children []XMLNode
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------- HANDLE -------------------
|
||||||
|
|
||||||
|
type XMLHandle struct {
|
||||||
|
BasicHandle
|
||||||
|
textEncodingType
|
||||||
|
|
||||||
|
DefaultNS string
|
||||||
|
NS map[string]string // ns URI to key, for encoding
|
||||||
|
Entities map[string]string // entity representation to string, for encoding.
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *XMLHandle) newEncDriver(e *Encoder) encDriver {
|
||||||
|
return &xmlEncDriver{e: e, w: e.w, h: h}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *XMLHandle) newDecDriver(d *Decoder) decDriver {
|
||||||
|
// d := xmlDecDriver{r: r.(*bytesDecReader), h: h}
|
||||||
|
hd := xmlDecDriver{d: d, r: d.r, h: h}
|
||||||
|
hd.n.bytes = d.b[:]
|
||||||
|
return &hd
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *XMLHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
|
||||||
|
return h.SetExt(rt, tag, &extWrapper{bytesExtFailer{}, ext})
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ decDriver = (*xmlDecDriver)(nil)
|
||||||
|
var _ encDriver = (*xmlEncDriver)(nil)
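The notes above sketch a configuration for XMLHandle (DefaultNS, NS, Entities), and the handle type is declared at the end of this build-ignored design file. The following is a hypothetical usage sketch only, not a released API: NewEncoder/Encode are the codec package's existing entry points, while XMLHandle, the namespace URI and the entity values are taken or invented purely for illustration.

// Hypothetical sketch: configure the proposed XMLHandle and encode with it.
// Written as if inside package codec (XMLHandle is not exported anywhere else);
// it would also require importing "io" in addition to "reflect".
func exampleXMLHandleUse(w io.Writer, v interface{}) error {
	var h XMLHandle
	h.DefaultNS = "http://ugorji.net/x-namespace"                  // assumed default namespace URI
	h.NS = map[string]string{"http://ugorji.net/x-namespace": "x"} // ns URI -> prefix, used for encoding
	h.Entities = map[string]string{"copy": "\u00a9"}               // extra entities honored when decoding
	return NewEncoder(w, &h).Encode(v)                             // same pattern as the other handles
}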
|
|
@ -0,0 +1,40 @@
|
||||||
|
// Copyright 2014-2017 Ulrich Kunitz. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/ulikunitz/xz"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
const text = "The quick brown fox jumps over the lazy dog.\n"
|
||||||
|
var buf bytes.Buffer
|
||||||
|
// compress text
|
||||||
|
w, err := xz.NewWriter(&buf)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("xz.NewWriter error %s", err)
|
||||||
|
}
|
||||||
|
if _, err := io.WriteString(w, text); err != nil {
|
||||||
|
log.Fatalf("WriteString error %s", err)
|
||||||
|
}
|
||||||
|
if err := w.Close(); err != nil {
|
||||||
|
log.Fatalf("w.Close error %s", err)
|
||||||
|
}
|
||||||
|
// decompress buffer and write output to stdout
|
||||||
|
r, err := xz.NewReader(&buf)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("NewReader error %s", err)
|
||||||
|
}
|
||||||
|
if _, err = io.Copy(os.Stdout, r); err != nil {
|
||||||
|
log.Fatalf("io.Copy error %s", err)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,712 @@
|
||||||
|
// Copyright 2012 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
//go:generate go run gen.go
|
||||||
|
//go:generate go run gen.go -test
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"io/ioutil"
|
||||||
|
"math/rand"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// identifier converts s to a Go exported identifier.
|
||||||
|
// It converts "div" to "Div" and "accept-charset" to "AcceptCharset".
|
||||||
|
func identifier(s string) string {
|
||||||
|
b := make([]byte, 0, len(s))
|
||||||
|
cap := true
|
||||||
|
for _, c := range s {
|
||||||
|
if c == '-' {
|
||||||
|
cap = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if cap && 'a' <= c && c <= 'z' {
|
||||||
|
c -= 'a' - 'A'
|
||||||
|
}
|
||||||
|
cap = false
|
||||||
|
b = append(b, byte(c))
|
||||||
|
}
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
var test = flag.Bool("test", false, "generate table_test.go")
|
||||||
|
|
||||||
|
func genFile(name string, buf *bytes.Buffer) {
|
||||||
|
b, err := format.Source(buf.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if err := ioutil.WriteFile(name, b, 0644); err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
var all []string
|
||||||
|
all = append(all, elements...)
|
||||||
|
all = append(all, attributes...)
|
||||||
|
all = append(all, eventHandlers...)
|
||||||
|
all = append(all, extra...)
|
||||||
|
sort.Strings(all)
|
||||||
|
|
||||||
|
// uniq - lists have dups
|
||||||
|
w := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if w == 0 || all[w-1] != s {
|
||||||
|
all[w] = s
|
||||||
|
w++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
all = all[:w]
|
||||||
|
|
||||||
|
if *test {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go -test\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n")
|
||||||
|
fmt.Fprintln(&buf, "var testAtomList = []string{")
|
||||||
|
for _, s := range all {
|
||||||
|
fmt.Fprintf(&buf, "\t%q,\n", s)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, "}")
|
||||||
|
|
||||||
|
genFile("table_test.go", &buf)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find hash that minimizes table size.
|
||||||
|
var best *table
|
||||||
|
for i := 0; i < 1000000; i++ {
|
||||||
|
if best != nil && 1<<(best.k-1) < len(all) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
h := rand.Uint32()
|
||||||
|
for k := uint(0); k <= 16; k++ {
|
||||||
|
if best != nil && k >= best.k {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
var t table
|
||||||
|
if t.init(h, k, all) {
|
||||||
|
best = &t
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if best == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "failed to construct string table\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lay out strings, using overlaps when possible.
|
||||||
|
layout := append([]string{}, all...)
|
||||||
|
|
||||||
|
// Remove strings that are substrings of other strings
|
||||||
|
for changed := true; changed; {
|
||||||
|
changed = false
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i != j && t != "" && strings.Contains(s, t) {
|
||||||
|
changed = true
|
||||||
|
layout[j] = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Join strings where one suffix matches another prefix.
|
||||||
|
for {
|
||||||
|
// Find best i, j, k such that layout[i][len-k:] == layout[j][:k],
|
||||||
|
// maximizing overlap length k.
|
||||||
|
besti := -1
|
||||||
|
bestj := -1
|
||||||
|
bestk := 0
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i == j {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for k := bestk + 1; k <= len(s) && k <= len(t); k++ {
|
||||||
|
if s[len(s)-k:] == t[:k] {
|
||||||
|
besti = i
|
||||||
|
bestj = j
|
||||||
|
bestk = k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if bestk > 0 {
|
||||||
|
layout[besti] += layout[bestj][bestk:]
|
||||||
|
layout[bestj] = ""
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
text := strings.Join(layout, "")
|
||||||
|
|
||||||
|
atom := map[string]uint32{}
|
||||||
|
for _, s := range all {
|
||||||
|
off := strings.Index(text, s)
|
||||||
|
if off < 0 {
|
||||||
|
panic("lost string " + s)
|
||||||
|
}
|
||||||
|
atom[s] = uint32(off<<8 | len(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
// Generate the Go code.
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n\nconst (")
|
||||||
|
|
||||||
|
// compute max len
|
||||||
|
maxLen := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if maxLen < len(s) {
|
||||||
|
maxLen = len(s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%s Atom = %#x\n", identifier(s), atom[s])
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, ")\n")
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "const hash0 = %#x\n\n", best.h0)
|
||||||
|
fmt.Fprintf(&buf, "const maxAtomLen = %d\n\n", maxLen)
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "var table = [1<<%d]Atom{\n", best.k)
|
||||||
|
for i, s := range best.tab {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%#x: %#x, // %s\n", i, atom[s], s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "}\n")
|
||||||
|
datasize := (1 << best.k) * 4
|
||||||
|
|
||||||
|
fmt.Fprintln(&buf, "const atomText =")
|
||||||
|
textsize := len(text)
|
||||||
|
for len(text) > 60 {
|
||||||
|
fmt.Fprintf(&buf, "\t%q +\n", text[:60])
|
||||||
|
text = text[60:]
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%q\n\n", text)
|
||||||
|
|
||||||
|
genFile("table.go", &buf)
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stdout, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize)
|
||||||
|
}
|
||||||
|
|
||||||
|
type byLen []string
|
||||||
|
|
||||||
|
func (x byLen) Less(i, j int) bool { return len(x[i]) > len(x[j]) }
|
||||||
|
func (x byLen) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
func (x byLen) Len() int { return len(x) }
|
||||||
|
|
||||||
|
// fnv computes the FNV hash with an arbitrary starting value h.
|
||||||
|
func fnv(h uint32, s string) uint32 {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
h ^= uint32(s[i])
|
||||||
|
h *= 16777619
|
||||||
|
}
|
||||||
|
return h
|
||||||
|
}
|
||||||
|
|
||||||
|
// A table represents an attempt at constructing the lookup table.
|
||||||
|
// The lookup table uses cuckoo hashing, meaning that each string
|
||||||
|
// can be found in one of two positions.
|
||||||
|
type table struct {
|
||||||
|
h0 uint32
|
||||||
|
k uint
|
||||||
|
mask uint32
|
||||||
|
tab []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// hash returns the two hashes for s.
|
||||||
|
func (t *table) hash(s string) (h1, h2 uint32) {
|
||||||
|
h := fnv(t.h0, s)
|
||||||
|
h1 = h & t.mask
|
||||||
|
h2 = (h >> 16) & t.mask
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// init initializes the table with the given parameters.
|
||||||
|
// h0 is the initial hash value,
|
||||||
|
// k is the number of bits of hash value to use, and
|
||||||
|
// x is the list of strings to store in the table.
|
||||||
|
// init returns false if the table cannot be constructed.
|
||||||
|
func (t *table) init(h0 uint32, k uint, x []string) bool {
|
||||||
|
t.h0 = h0
|
||||||
|
t.k = k
|
||||||
|
t.tab = make([]string, 1<<k)
|
||||||
|
t.mask = 1<<k - 1
|
||||||
|
for _, s := range x {
|
||||||
|
if !t.insert(s) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert inserts s in the table.
|
||||||
|
func (t *table) insert(s string) bool {
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
if t.tab[h1] == "" {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.tab[h2] == "" {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h1, 0) {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h2, 0) {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// push attempts to push aside the entry in slot i.
|
||||||
|
func (t *table) push(i uint32, depth int) bool {
|
||||||
|
if depth > len(t.tab) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s := t.tab[i]
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
j := h1 + h2 - i
|
||||||
|
if t.tab[j] != "" && !t.push(j, depth+1) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
t.tab[j] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// The lists of element names and attribute keys were taken from
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#index
|
||||||
|
// as of the "HTML Living Standard - Last Updated 16 April 2018" version.
|
||||||
|
|
||||||
|
// "command", "keygen" and "menuitem" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var elements = []string{
|
||||||
|
"a",
|
||||||
|
"abbr",
|
||||||
|
"address",
|
||||||
|
"area",
|
||||||
|
"article",
|
||||||
|
"aside",
|
||||||
|
"audio",
|
||||||
|
"b",
|
||||||
|
"base",
|
||||||
|
"bdi",
|
||||||
|
"bdo",
|
||||||
|
"blockquote",
|
||||||
|
"body",
|
||||||
|
"br",
|
||||||
|
"button",
|
||||||
|
"canvas",
|
||||||
|
"caption",
|
||||||
|
"cite",
|
||||||
|
"code",
|
||||||
|
"col",
|
||||||
|
"colgroup",
|
||||||
|
"command",
|
||||||
|
"data",
|
||||||
|
"datalist",
|
||||||
|
"dd",
|
||||||
|
"del",
|
||||||
|
"details",
|
||||||
|
"dfn",
|
||||||
|
"dialog",
|
||||||
|
"div",
|
||||||
|
"dl",
|
||||||
|
"dt",
|
||||||
|
"em",
|
||||||
|
"embed",
|
||||||
|
"fieldset",
|
||||||
|
"figcaption",
|
||||||
|
"figure",
|
||||||
|
"footer",
|
||||||
|
"form",
|
||||||
|
"h1",
|
||||||
|
"h2",
|
||||||
|
"h3",
|
||||||
|
"h4",
|
||||||
|
"h5",
|
||||||
|
"h6",
|
||||||
|
"head",
|
||||||
|
"header",
|
||||||
|
"hgroup",
|
||||||
|
"hr",
|
||||||
|
"html",
|
||||||
|
"i",
|
||||||
|
"iframe",
|
||||||
|
"img",
|
||||||
|
"input",
|
||||||
|
"ins",
|
||||||
|
"kbd",
|
||||||
|
"keygen",
|
||||||
|
"label",
|
||||||
|
"legend",
|
||||||
|
"li",
|
||||||
|
"link",
|
||||||
|
"main",
|
||||||
|
"map",
|
||||||
|
"mark",
|
||||||
|
"menu",
|
||||||
|
"menuitem",
|
||||||
|
"meta",
|
||||||
|
"meter",
|
||||||
|
"nav",
|
||||||
|
"noscript",
|
||||||
|
"object",
|
||||||
|
"ol",
|
||||||
|
"optgroup",
|
||||||
|
"option",
|
||||||
|
"output",
|
||||||
|
"p",
|
||||||
|
"param",
|
||||||
|
"picture",
|
||||||
|
"pre",
|
||||||
|
"progress",
|
||||||
|
"q",
|
||||||
|
"rp",
|
||||||
|
"rt",
|
||||||
|
"ruby",
|
||||||
|
"s",
|
||||||
|
"samp",
|
||||||
|
"script",
|
||||||
|
"section",
|
||||||
|
"select",
|
||||||
|
"slot",
|
||||||
|
"small",
|
||||||
|
"source",
|
||||||
|
"span",
|
||||||
|
"strong",
|
||||||
|
"style",
|
||||||
|
"sub",
|
||||||
|
"summary",
|
||||||
|
"sup",
|
||||||
|
"table",
|
||||||
|
"tbody",
|
||||||
|
"td",
|
||||||
|
"template",
|
||||||
|
"textarea",
|
||||||
|
"tfoot",
|
||||||
|
"th",
|
||||||
|
"thead",
|
||||||
|
"time",
|
||||||
|
"title",
|
||||||
|
"tr",
|
||||||
|
"track",
|
||||||
|
"u",
|
||||||
|
"ul",
|
||||||
|
"var",
|
||||||
|
"video",
|
||||||
|
"wbr",
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#attributes-3
|
||||||
|
//
|
||||||
|
// "challenge", "command", "contextmenu", "dropzone", "icon", "keytype", "mediagroup",
|
||||||
|
// "radiogroup", "spellcheck", "scoped", "seamless", "sortable" and "sorted" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var attributes = []string{
|
||||||
|
"abbr",
|
||||||
|
"accept",
|
||||||
|
"accept-charset",
|
||||||
|
"accesskey",
|
||||||
|
"action",
|
||||||
|
"allowfullscreen",
|
||||||
|
"allowpaymentrequest",
|
||||||
|
"allowusermedia",
|
||||||
|
"alt",
|
||||||
|
"as",
|
||||||
|
"async",
|
||||||
|
"autocomplete",
|
||||||
|
"autofocus",
|
||||||
|
"autoplay",
|
||||||
|
"challenge",
|
||||||
|
"charset",
|
||||||
|
"checked",
|
||||||
|
"cite",
|
||||||
|
"class",
|
||||||
|
"color",
|
||||||
|
"cols",
|
||||||
|
"colspan",
|
||||||
|
"command",
|
||||||
|
"content",
|
||||||
|
"contenteditable",
|
||||||
|
"contextmenu",
|
||||||
|
"controls",
|
||||||
|
"coords",
|
||||||
|
"crossorigin",
|
||||||
|
"data",
|
||||||
|
"datetime",
|
||||||
|
"default",
|
||||||
|
"defer",
|
||||||
|
"dir",
|
||||||
|
"dirname",
|
||||||
|
"disabled",
|
||||||
|
"download",
|
||||||
|
"draggable",
|
||||||
|
"dropzone",
|
||||||
|
"enctype",
|
||||||
|
"for",
|
||||||
|
"form",
|
||||||
|
"formaction",
|
||||||
|
"formenctype",
|
||||||
|
"formmethod",
|
||||||
|
"formnovalidate",
|
||||||
|
"formtarget",
|
||||||
|
"headers",
|
||||||
|
"height",
|
||||||
|
"hidden",
|
||||||
|
"high",
|
||||||
|
"href",
|
||||||
|
"hreflang",
|
||||||
|
"http-equiv",
|
||||||
|
"icon",
|
||||||
|
"id",
|
||||||
|
"inputmode",
|
||||||
|
"integrity",
|
||||||
|
"is",
|
||||||
|
"ismap",
|
||||||
|
"itemid",
|
||||||
|
"itemprop",
|
||||||
|
"itemref",
|
||||||
|
"itemscope",
|
||||||
|
"itemtype",
|
||||||
|
"keytype",
|
||||||
|
"kind",
|
||||||
|
"label",
|
||||||
|
"lang",
|
||||||
|
"list",
|
||||||
|
"loop",
|
||||||
|
"low",
|
||||||
|
"manifest",
|
||||||
|
"max",
|
||||||
|
"maxlength",
|
||||||
|
"media",
|
||||||
|
"mediagroup",
|
||||||
|
"method",
|
||||||
|
"min",
|
||||||
|
"minlength",
|
||||||
|
"multiple",
|
||||||
|
"muted",
|
||||||
|
"name",
|
||||||
|
"nomodule",
|
||||||
|
"nonce",
|
||||||
|
"novalidate",
|
||||||
|
"open",
|
||||||
|
"optimum",
|
||||||
|
"pattern",
|
||||||
|
"ping",
|
||||||
|
"placeholder",
|
||||||
|
"playsinline",
|
||||||
|
"poster",
|
||||||
|
"preload",
|
||||||
|
"radiogroup",
|
||||||
|
"readonly",
|
||||||
|
"referrerpolicy",
|
||||||
|
"rel",
|
||||||
|
"required",
|
||||||
|
"reversed",
|
||||||
|
"rows",
|
||||||
|
"rowspan",
|
||||||
|
"sandbox",
|
||||||
|
"spellcheck",
|
||||||
|
"scope",
|
||||||
|
"scoped",
|
||||||
|
"seamless",
|
||||||
|
"selected",
|
||||||
|
"shape",
|
||||||
|
"size",
|
||||||
|
"sizes",
|
||||||
|
"sortable",
|
||||||
|
"sorted",
|
||||||
|
"slot",
|
||||||
|
"span",
|
||||||
|
"spellcheck",
|
||||||
|
"src",
|
||||||
|
"srcdoc",
|
||||||
|
"srclang",
|
||||||
|
"srcset",
|
||||||
|
"start",
|
||||||
|
"step",
|
||||||
|
"style",
|
||||||
|
"tabindex",
|
||||||
|
"target",
|
||||||
|
"title",
|
||||||
|
"translate",
|
||||||
|
"type",
|
||||||
|
"typemustmatch",
|
||||||
|
"updateviacache",
|
||||||
|
"usemap",
|
||||||
|
"value",
|
||||||
|
"width",
|
||||||
|
"workertype",
|
||||||
|
"wrap",
|
||||||
|
}
|
||||||
|
|
||||||
|
// "onautocomplete", "onautocompleteerror", "onmousewheel",
|
||||||
|
// "onshow" and "onsort" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var eventHandlers = []string{
|
||||||
|
"onabort",
|
||||||
|
"onautocomplete",
|
||||||
|
"onautocompleteerror",
|
||||||
|
"onauxclick",
|
||||||
|
"onafterprint",
|
||||||
|
"onbeforeprint",
|
||||||
|
"onbeforeunload",
|
||||||
|
"onblur",
|
||||||
|
"oncancel",
|
||||||
|
"oncanplay",
|
||||||
|
"oncanplaythrough",
|
||||||
|
"onchange",
|
||||||
|
"onclick",
|
||||||
|
"onclose",
|
||||||
|
"oncontextmenu",
|
||||||
|
"oncopy",
|
||||||
|
"oncuechange",
|
||||||
|
"oncut",
|
||||||
|
"ondblclick",
|
||||||
|
"ondrag",
|
||||||
|
"ondragend",
|
||||||
|
"ondragenter",
|
||||||
|
"ondragexit",
|
||||||
|
"ondragleave",
|
||||||
|
"ondragover",
|
||||||
|
"ondragstart",
|
||||||
|
"ondrop",
|
||||||
|
"ondurationchange",
|
||||||
|
"onemptied",
|
||||||
|
"onended",
|
||||||
|
"onerror",
|
||||||
|
"onfocus",
|
||||||
|
"onhashchange",
|
||||||
|
"oninput",
|
||||||
|
"oninvalid",
|
||||||
|
"onkeydown",
|
||||||
|
"onkeypress",
|
||||||
|
"onkeyup",
|
||||||
|
"onlanguagechange",
|
||||||
|
"onload",
|
||||||
|
"onloadeddata",
|
||||||
|
"onloadedmetadata",
|
||||||
|
"onloadend",
|
||||||
|
"onloadstart",
|
||||||
|
"onmessage",
|
||||||
|
"onmessageerror",
|
||||||
|
"onmousedown",
|
||||||
|
"onmouseenter",
|
||||||
|
"onmouseleave",
|
||||||
|
"onmousemove",
|
||||||
|
"onmouseout",
|
||||||
|
"onmouseover",
|
||||||
|
"onmouseup",
|
||||||
|
"onmousewheel",
|
||||||
|
"onwheel",
|
||||||
|
"onoffline",
|
||||||
|
"ononline",
|
||||||
|
"onpagehide",
|
||||||
|
"onpageshow",
|
||||||
|
"onpaste",
|
||||||
|
"onpause",
|
||||||
|
"onplay",
|
||||||
|
"onplaying",
|
||||||
|
"onpopstate",
|
||||||
|
"onprogress",
|
||||||
|
"onratechange",
|
||||||
|
"onreset",
|
||||||
|
"onresize",
|
||||||
|
"onrejectionhandled",
|
||||||
|
"onscroll",
|
||||||
|
"onsecuritypolicyviolation",
|
||||||
|
"onseeked",
|
||||||
|
"onseeking",
|
||||||
|
"onselect",
|
||||||
|
"onshow",
|
||||||
|
"onsort",
|
||||||
|
"onstalled",
|
||||||
|
"onstorage",
|
||||||
|
"onsubmit",
|
||||||
|
"onsuspend",
|
||||||
|
"ontimeupdate",
|
||||||
|
"ontoggle",
|
||||||
|
"onunhandledrejection",
|
||||||
|
"onunload",
|
||||||
|
"onvolumechange",
|
||||||
|
"onwaiting",
|
||||||
|
}
|
||||||
|
|
||||||
|
// extra are ad-hoc values not covered by any of the lists above.
|
||||||
|
var extra = []string{
|
||||||
|
"acronym",
|
||||||
|
"align",
|
||||||
|
"annotation",
|
||||||
|
"annotation-xml",
|
||||||
|
"applet",
|
||||||
|
"basefont",
|
||||||
|
"bgsound",
|
||||||
|
"big",
|
||||||
|
"blink",
|
||||||
|
"center",
|
||||||
|
"color",
|
||||||
|
"desc",
|
||||||
|
"face",
|
||||||
|
"font",
|
||||||
|
"foreignObject", // HTML is case-insensitive, but SVG-embedded-in-HTML is case-sensitive.
|
||||||
|
"foreignobject",
|
||||||
|
"frame",
|
||||||
|
"frameset",
|
||||||
|
"image",
|
||||||
|
"isindex",
|
||||||
|
"listing",
|
||||||
|
"malignmark",
|
||||||
|
"marquee",
|
||||||
|
"math",
|
||||||
|
"mglyph",
|
||||||
|
"mi",
|
||||||
|
"mn",
|
||||||
|
"mo",
|
||||||
|
"ms",
|
||||||
|
"mtext",
|
||||||
|
"nobr",
|
||||||
|
"noembed",
|
||||||
|
"noframes",
|
||||||
|
"plaintext",
|
||||||
|
"prompt",
|
||||||
|
"public",
|
||||||
|
"rb",
|
||||||
|
"rtc",
|
||||||
|
"spacer",
|
||||||
|
"strike",
|
||||||
|
"svg",
|
||||||
|
"system",
|
||||||
|
"tt",
|
||||||
|
"xmp",
|
||||||
|
}
|
|
@ -347,6 +347,7 @@ loop:
|
||||||
break loop
|
break loop
|
||||||
}
|
}
|
||||||
if c != '/' {
|
if c != '/' {
|
||||||
|
z.raw.end--
|
||||||
continue loop
|
continue loop
|
||||||
}
|
}
|
||||||
if z.readRawEndTag() || z.err != nil {
|
if z.readRawEndTag() || z.err != nil {
|
||||||
|
@ -1067,6 +1068,11 @@ loop:
|
||||||
|
|
||||||
// Raw returns the unmodified text of the current token. Calling Next, Token,
|
// Raw returns the unmodified text of the current token. Calling Next, Token,
|
||||||
// Text, TagName or TagAttr may change the contents of the returned slice.
|
// Text, TagName or TagAttr may change the contents of the returned slice.
|
||||||
|
//
|
||||||
|
// The token stream's raw bytes partition the byte stream (up until an
|
||||||
|
// ErrorToken). There are no overlaps or gaps between two consecutive tokens'
|
||||||
|
// raw bytes. One implication is that the byte offset of the current token is
|
||||||
|
// the sum of the lengths of all previous tokens' raw bytes.
|
||||||
func (z *Tokenizer) Raw() []byte {
|
func (z *Tokenizer) Raw() []byte {
|
||||||
return z.buf[z.raw.start:z.raw.end]
|
return z.buf[z.raw.start:z.raw.end]
|
||||||
}
|
}
|
||||||
|
|
|
@ -150,7 +150,7 @@ func appendIndexed(dst []byte, i uint64) []byte {
|
||||||
// extended buffer.
|
// extended buffer.
|
||||||
//
|
//
|
||||||
// If f.Sensitive is true, "Never Indexed" representation is used. If
|
// If f.Sensitive is true, "Never Indexed" representation is used. If
|
||||||
// f.Sensitive is false and indexing is true, "Inremental Indexing"
|
// f.Sensitive is false and indexing is true, "Incremental Indexing"
|
||||||
// representation is used.
|
// representation is used.
|
||||||
func appendNewName(dst []byte, f HeaderField, indexing bool) []byte {
|
func appendNewName(dst []byte, f HeaderField, indexing bool) []byte {
|
||||||
dst = append(dst, encodeTypeByte(indexing, f.Sensitive))
|
dst = append(dst, encodeTypeByte(indexing, f.Sensitive))
|
||||||
|
|
|
@ -56,6 +56,7 @@ const (
|
||||||
firstSettingsTimeout = 2 * time.Second // should be in-flight with preface anyway
|
firstSettingsTimeout = 2 * time.Second // should be in-flight with preface anyway
|
||||||
handlerChunkWriteSize = 4 << 10
|
handlerChunkWriteSize = 4 << 10
|
||||||
defaultMaxStreams = 250 // TODO: make this 100 as the GFE seems to?
|
defaultMaxStreams = 250 // TODO: make this 100 as the GFE seems to?
|
||||||
|
maxQueuedControlFrames = 10000
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
@ -163,6 +164,15 @@ func (s *Server) maxConcurrentStreams() uint32 {
|
||||||
return defaultMaxStreams
|
return defaultMaxStreams
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// maxQueuedControlFrames is the maximum number of control frames like
|
||||||
|
// SETTINGS, PING and RST_STREAM that will be queued for writing before
|
||||||
|
// the connection is closed to prevent memory exhaustion attacks.
|
||||||
|
func (s *Server) maxQueuedControlFrames() int {
|
||||||
|
// TODO: if anybody asks, add a Server field, and remember to define the
|
||||||
|
// behavior of negative values.
|
||||||
|
return maxQueuedControlFrames
|
||||||
|
}
|
||||||
|
|
||||||
type serverInternalState struct {
|
type serverInternalState struct {
|
||||||
mu sync.Mutex
|
mu sync.Mutex
|
||||||
activeConns map[*serverConn]struct{}
|
activeConns map[*serverConn]struct{}
|
||||||
|
@ -312,7 +322,7 @@ type ServeConnOpts struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *ServeConnOpts) context() context.Context {
|
func (o *ServeConnOpts) context() context.Context {
|
||||||
if o.Context != nil {
|
if o != nil && o.Context != nil {
|
||||||
return o.Context
|
return o.Context
|
||||||
}
|
}
|
||||||
return context.Background()
|
return context.Background()
|
||||||
|
@@ -506,6 +516,7 @@ type serverConn struct {
 	sawFirstSettings bool // got the initial SETTINGS frame after the preface
 	needToSendSettingsAck bool
 	unackedSettings int // how many SETTINGS have we sent without ACKs?
+	queuedControlFrames int // control frames in the writeSched queue
 	clientMaxStreams uint32 // SETTINGS_MAX_CONCURRENT_STREAMS from client (our PUSH_PROMISE limit)
 	advMaxStreams uint32 // our SETTINGS_MAX_CONCURRENT_STREAMS advertised the client
 	curClientStreams uint32 // number of open streams initiated by the client
@@ -894,6 +905,14 @@ func (sc *serverConn) serve() {
 			}
 		}
 
+		// If the peer is causing us to generate a lot of control frames,
+		// but not reading them from us, assume they are trying to make us
+		// run out of memory.
+		if sc.queuedControlFrames > sc.srv.maxQueuedControlFrames() {
+			sc.vlogf("http2: too many control frames in send queue, closing connection")
+			return
+		}
+
 		// Start the shutdown timer after sending a GOAWAY. When sending GOAWAY
 		// with no error code (graceful shutdown), don't start the timer until
 		// all open streams have been completed.
@@ -1093,6 +1112,14 @@ func (sc *serverConn) writeFrame(wr FrameWriteRequest) {
 	}
 
 	if !ignoreWrite {
+		if wr.isControl() {
+			sc.queuedControlFrames++
+			// For extra safety, detect wraparounds, which should not happen,
+			// and pull the plug.
+			if sc.queuedControlFrames < 0 {
+				sc.conn.Close()
+			}
+		}
 		sc.writeSched.Push(wr)
 	}
 	sc.scheduleFrameWrite()
@@ -1210,10 +1237,8 @@ func (sc *serverConn) wroteFrame(res frameWriteResult) {
 // If a frame is already being written, nothing happens. This will be called again
 // when the frame is done being written.
 //
-// If a frame isn't being written we need to send one, the best frame
-// to send is selected, preferring first things that aren't
-// stream-specific (e.g. ACKing settings), and then finding the
-// highest priority stream.
+// If a frame isn't being written and we need to send one, the best frame
+// to send is selected by writeSched.
 //
 // If a frame isn't being written and there's nothing else to send, we
 // flush the write buffer.
@@ -1241,6 +1266,9 @@ func (sc *serverConn) scheduleFrameWrite() {
 		}
 		if !sc.inGoAway || sc.goAwayCode == ErrCodeNo {
 			if wr, ok := sc.writeSched.Pop(); ok {
+				if wr.isControl() {
+					sc.queuedControlFrames--
+				}
 				sc.startFrameWrite(wr)
 				continue
 			}
@@ -1533,6 +1561,8 @@ func (sc *serverConn) processSettings(f *SettingsFrame) error {
 	if err := f.ForeachSetting(sc.processSetting); err != nil {
 		return err
 	}
+	// TODO: judging by RFC 7540, Section 6.5.3 each SETTINGS frame should be
+	// acknowledged individually, even if multiple are received before the ACK.
 	sc.needToSendSettingsAck = true
 	sc.scheduleFrameWrite()
 	return nil
@@ -2385,7 +2415,11 @@ func (rws *responseWriterState) writeChunk(p []byte) (n int, err error) {
 		clen = strconv.Itoa(len(p))
 	}
 	_, hasContentType := rws.snapHeader["Content-Type"]
-	if !hasContentType && bodyAllowedForStatus(rws.status) && len(p) > 0 {
+	// If the Content-Encoding is non-blank, we shouldn't
+	// sniff the body. See Issue golang.org/issue/31753.
+	ce := rws.snapHeader.Get("Content-Encoding")
+	hasCE := len(ce) > 0
+	if !hasCE && !hasContentType && bodyAllowedForStatus(rws.status) && len(p) > 0 {
 		ctype = http.DetectContentType(p)
 	}
 	var date string
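The hunk above stops MIME sniffing once a Content-Encoding is set, since sniffing compressed bytes would mislabel the body. A standalone sketch of the same decision using net/http, illustrative only and not the server's actual code:

    package sketch

    import "net/http"

    // contentTypeFor returns the Content-Type to send for body: an explicit
    // header wins, sniffing is skipped for encoded bodies, and otherwise the
    // first bytes are sniffed with http.DetectContentType.
    func contentTypeFor(h http.Header, body []byte) string {
    	if ct := h.Get("Content-Type"); ct != "" {
    		return ct
    	}
    	if h.Get("Content-Encoding") != "" {
    		// Encoded (e.g. gzip) bytes would sniff as an archive or
    		// octet-stream type, so leave Content-Type unset here.
    		return ""
    	}
    	if len(body) == 0 {
    		return ""
    	}
    	return http.DetectContentType(body)
    }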
@@ -2494,7 +2528,7 @@ const TrailerPrefix = "Trailer:"
 // trailers. That worked for a while, until we found the first major
 // user of Trailers in the wild: gRPC (using them only over http2),
 // and gRPC libraries permit setting trailers mid-stream without
-// predeclarnig them. So: change of plans. We still permit the old
+// predeclaring them. So: change of plans. We still permit the old
 // way, but we also permit this hack: if a Header() key begins with
 // "Trailer:", the suffix of that key is a Trailer. Because ':' is an
 // invalid token byte anyway, there is no ambiguity. (And it's already
@@ -2794,7 +2828,7 @@ func (sc *serverConn) startPush(msg *startPushRequest) {
 	// PUSH_PROMISE frames MUST only be sent on a peer-initiated stream that
 	// is in either the "open" or "half-closed (remote)" state.
 	if msg.parent.state != stateOpen && msg.parent.state != stateHalfClosedRemote {
-		// responseWriter.Push checks that the stream is peer-initiaed.
+		// responseWriter.Push checks that the stream is peer-initiated.
 		msg.done <- errStreamClosed
 		return
 	}
@@ -992,7 +992,7 @@ func (cc *ClientConn) roundTrip(req *http.Request) (res *http.Response, gotErrAf
 		req.Method != "HEAD" {
 		// Request gzip only, not deflate. Deflate is ambiguous and
 		// not as universally supported anyway.
-		// See: http://www.gzip.org/zlib/zlib_faq.html#faq38
+		// See: https://zlib.net/zlib_faq.html#faq39
 		//
 		// Note that we don't request this for HEAD requests,
 		// due to a bug in nginx:
@@ -1216,6 +1216,8 @@ var (
 
 	// abort request body write, but send stream reset of cancel.
 	errStopReqBodyWriteAndCancel = errors.New("http2: canceling request")
+
+	errReqBodyTooLong = errors.New("http2: request body larger than specified content length")
 )
 
 func (cs *clientStream) writeRequestBody(body io.Reader, bodyCloser io.Closer) (err error) {
@@ -1238,10 +1240,32 @@ func (cs *clientStream) writeRequestBody(body io.Reader, bodyCloser io.Closer) (
 
 	req := cs.req
 	hasTrailers := req.Trailer != nil
+	remainLen := actualContentLength(req)
+	hasContentLen := remainLen != -1
 
 	var sawEOF bool
 	for !sawEOF {
-		n, err := body.Read(buf)
+		n, err := body.Read(buf[:len(buf)-1])
+		if hasContentLen {
+			remainLen -= int64(n)
+			if remainLen == 0 && err == nil {
+				// The request body's Content-Length was predeclared and
+				// we just finished reading it all, but the underlying io.Reader
+				// returned the final chunk with a nil error (which is one of
+				// the two valid things a Reader can do at EOF). Because we'd prefer
+				// to send the END_STREAM bit early, double-check that we're actually
+				// at EOF. Subsequent reads should return (0, EOF) at this point.
+				// If either value is different, we return an error in one of two ways below.
+				var n1 int
+				n1, err = body.Read(buf[n:])
+				remainLen -= int64(n1)
+			}
+			if remainLen < 0 {
+				err = errReqBodyTooLong
+				cc.writeStreamReset(cs.ID, ErrCodeCancel, err)
+				return err
+			}
+		}
 		if err == io.EOF {
 			sawEOF = true
 			err = nil
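The transport change above tracks how many bytes remain against the declared Content-Length and resets the stream when the reader produces more. A simplified sketch of that bookkeeping over a plain io.Reader, with illustrative names and without the early-EOF double read:

    package sketch

    import (
    	"errors"
    	"io"
    )

    var errBodyTooLong = errors.New("request body larger than declared content length")

    // copyWithDeclaredLength copies body to dst, failing if the reader
    // yields more than the declared number of bytes.
    func copyWithDeclaredLength(dst io.Writer, body io.Reader, declared int64) error {
    	buf := make([]byte, 4096)
    	remain := declared
    	for {
    		n, err := body.Read(buf)
    		remain -= int64(n)
    		if remain < 0 {
    			return errBodyTooLong
    		}
    		if n > 0 {
    			if _, werr := dst.Write(buf[:n]); werr != nil {
    				return werr
    			}
    		}
    		if err == io.EOF {
    			return nil
    		}
    		if err != nil {
    			return err
    		}
    	}
    }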
@@ -32,7 +32,7 @@ type WriteScheduler interface {
 
 	// Pop dequeues the next frame to write. Returns false if no frames can
 	// be written. Frames with a given wr.StreamID() are Pop'd in the same
-	// order they are Push'd.
+	// order they are Push'd. No frames should be discarded except by CloseStream.
 	Pop() (wr FrameWriteRequest, ok bool)
 }
 
@@ -76,6 +76,12 @@ func (wr FrameWriteRequest) StreamID() uint32 {
 	return wr.stream.id
 }
 
+// isControl reports whether wr is a control frame for MaxQueuedControlFrames
+// purposes. That includes non-stream frames and RST_STREAM frames.
+func (wr FrameWriteRequest) isControl() bool {
+	return wr.stream == nil
+}
+
 // DataSize returns the number of flow control bytes that must be consumed
 // to write this entire frame. This is 0 for non-DATA frames.
 func (wr FrameWriteRequest) DataSize() int {
@@ -149,7 +149,7 @@ func (n *priorityNode) addBytes(b int64) {
 }
 
 // walkReadyInOrder iterates over the tree in priority order, calling f for each node
-// with a non-empty write queue. When f returns true, this funcion returns true and the
+// with a non-empty write queue. When f returns true, this function returns true and the
 // walk halts. tmp is used as scratch space for sorting.
 //
 // f(n, openParent) takes two arguments: the node to visit, n, and a bool that is true
@@ -19,7 +19,8 @@ type randomWriteScheduler struct {
 	zero writeQueue
 
 	// sq contains the stream-specific queues, keyed by stream ID.
-	// When a stream is idle or closed, it's deleted from the map.
+	// When a stream is idle, closed, or emptied, it's deleted
+	// from the map.
 	sq map[uint32]*writeQueue
 
 	// pool of empty queues for reuse.
@@ -63,8 +64,12 @@ func (ws *randomWriteScheduler) Pop() (FrameWriteRequest, bool) {
 		return ws.zero.shift(), true
 	}
 	// Iterate over all non-idle streams until finding one that can be consumed.
-	for _, q := range ws.sq {
+	for streamID, q := range ws.sq {
 		if wr, ok := q.consume(math.MaxInt32); ok {
+			if q.empty() {
+				delete(ws.sq, streamID)
+				ws.queuePool.put(q)
+			}
 			return wr, true
 		}
 	}
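The Pop change above removes a stream's queue from the map once it is emptied and returns it to a pool for reuse. A minimal sketch of that delete-and-recycle pattern over a map of queues, with toy types standing in for the scheduler's writeQueue and pool:

    package sketch

    // queue is a toy FIFO standing in for the scheduler's per-stream write queue.
    type queue struct{ items []int }

    func (q *queue) empty() bool { return len(q.items) == 0 }

    type scheduler struct {
    	sq   map[uint32]*queue
    	pool []*queue // recycled empty queues
    }

    // pop takes one item from any non-empty stream queue; queues that become
    // empty are removed from the map and kept for later reuse.
    func (s *scheduler) pop() (int, bool) {
    	for id, q := range s.sq {
    		if q.empty() {
    			continue
    		}
    		v := q.items[0]
    		q.items = q.items[1:]
    		if q.empty() {
    			delete(s.sq, id) // deleting during range is safe in Go
    			s.pool = append(s.pool, q)
    		}
    		return v, true
    	}
    	return 0, false
    }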
@@ -0,0 +1,61 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// mkasm_darwin.go generates assembly trampolines to call libSystem routines from Go.
|
||||||
|
// This program must be run after mksyscall.go.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
in1, err := ioutil.ReadFile("syscall_darwin.go")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("can't open syscall_darwin.go: %s", err)
|
||||||
|
}
|
||||||
|
arch := os.Args[1]
|
||||||
|
in2, err := ioutil.ReadFile(fmt.Sprintf("syscall_darwin_%s.go", arch))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("can't open syscall_darwin_%s.go: %s", arch, err)
|
||||||
|
}
|
||||||
|
in3, err := ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.go", arch))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("can't open zsyscall_darwin_%s.go: %s", arch, err)
|
||||||
|
}
|
||||||
|
in := string(in1) + string(in2) + string(in3)
|
||||||
|
|
||||||
|
trampolines := map[string]bool{}
|
||||||
|
|
||||||
|
var out bytes.Buffer
|
||||||
|
|
||||||
|
fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " "))
|
||||||
|
fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintf(&out, "\n")
|
||||||
|
fmt.Fprintf(&out, "// +build go1.12\n")
|
||||||
|
fmt.Fprintf(&out, "\n")
|
||||||
|
fmt.Fprintf(&out, "#include \"textflag.h\"\n")
|
||||||
|
for _, line := range strings.Split(in, "\n") {
|
||||||
|
if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fn := line[5 : len(line)-13]
|
||||||
|
if !trampolines[fn] {
|
||||||
|
trampolines[fn] = true
|
||||||
|
fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn)
|
||||||
|
fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err = ioutil.WriteFile(fmt.Sprintf("zsyscall_darwin_%s.s", arch), out.Bytes(), 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("can't write zsyscall_darwin_%s.s: %s", arch, err)
|
||||||
|
}
|
||||||
|
}
|
|
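For reference, and not part of this change: the Fprintf calls above emit one assembly stub per scanned trampoline declaration. The sketch below shows the text produced for a single, purely illustrative function name (libc_read):

    package sketch

    import "fmt"

    // exampleTrampoline shows the stub text mkasm_darwin.go writes for one
    // scanned "func libc_read_trampoline()" declaration; the name is an example.
    func exampleTrampoline() string {
    	fn := "libc_read"
    	return fmt.Sprintf("TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n\tJMP\t%s(SB)\n", fn, fn)
    }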
@@ -0,0 +1,122 @@
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// mkpost processes the output of cgo -godefs to
|
||||||
|
// modify the generated types. It is used to clean up
|
||||||
|
// the sys API in an architecture specific manner.
|
||||||
|
//
|
||||||
|
// mkpost is run after cgo -godefs; see README.md.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS and architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goos := os.Getenv("GOOS")
|
||||||
|
goarch := os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
// Check that we are using the Docker-based build system if we should be.
|
||||||
|
if goos == "linux" {
|
||||||
|
if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
|
||||||
|
os.Stderr.WriteString("In the Docker-based build system, mkpost should not be called directly.\n")
|
||||||
|
os.Stderr.WriteString("See README.md\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
b, err := ioutil.ReadAll(os.Stdin)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if goos == "aix" {
|
||||||
|
// Replace type of Atim, Mtim and Ctim by Timespec in Stat_t
|
||||||
|
// to avoid having both StTimespec and Timespec.
|
||||||
|
sttimespec := regexp.MustCompile(`_Ctype_struct_st_timespec`)
|
||||||
|
b = sttimespec.ReplaceAll(b, []byte("Timespec"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intentionally export __val fields in Fsid and Sigset_t
|
||||||
|
valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__(bits|val)(\s+\S+\s+)}`)
|
||||||
|
b = valRegex.ReplaceAll(b, []byte("type $1 struct {${2}Val$4}"))
|
||||||
|
|
||||||
|
// Intentionally export __fds_bits field in FdSet
|
||||||
|
fdSetRegex := regexp.MustCompile(`type (FdSet) struct {(\s+)X__fds_bits(\s+\S+\s+)}`)
|
||||||
|
b = fdSetRegex.ReplaceAll(b, []byte("type $1 struct {${2}Bits$3}"))
|
||||||
|
|
||||||
|
// If we have empty Ptrace structs, we should delete them. Only s390x emits
|
||||||
|
// nonempty Ptrace structs.
|
||||||
|
ptraceRexexp := regexp.MustCompile(`type Ptrace((Psw|Fpregs|Per) struct {\s*})`)
|
||||||
|
b = ptraceRexexp.ReplaceAll(b, nil)
|
||||||
|
|
||||||
|
// Replace the control_regs union with a blank identifier for now.
|
||||||
|
controlRegsRegex := regexp.MustCompile(`(Control_regs)\s+\[0\]uint64`)
|
||||||
|
b = controlRegsRegex.ReplaceAll(b, []byte("_ [0]uint64"))
|
||||||
|
|
||||||
|
// Remove fields that are added by glibc
|
||||||
|
// Note that this is unstable as the identifiers are private.
|
||||||
|
removeFieldsRegex := regexp.MustCompile(`X__glibc\S*`)
|
||||||
|
b = removeFieldsRegex.ReplaceAll(b, []byte("_"))
|
||||||
|
|
||||||
|
// Convert [65]int8 to [65]byte in Utsname members to simplify
|
||||||
|
// conversion to string; see golang.org/issue/20753
|
||||||
|
convertUtsnameRegex := regexp.MustCompile(`((Sys|Node|Domain)name|Release|Version|Machine)(\s+)\[(\d+)\]u?int8`)
|
||||||
|
b = convertUtsnameRegex.ReplaceAll(b, []byte("$1$3[$4]byte"))
|
||||||
|
|
||||||
|
// Convert [1024]int8 to [1024]byte in Ptmget members
|
||||||
|
convertPtmget := regexp.MustCompile(`([SC]n)(\s+)\[(\d+)\]u?int8`)
|
||||||
|
b = convertPtmget.ReplaceAll(b, []byte("$1[$3]byte"))
|
||||||
|
|
||||||
|
// Remove spare fields (e.g. in Statx_t)
|
||||||
|
spareFieldsRegex := regexp.MustCompile(`X__spare\S*`)
|
||||||
|
b = spareFieldsRegex.ReplaceAll(b, []byte("_"))
|
||||||
|
|
||||||
|
// Remove cgo padding fields
|
||||||
|
removePaddingFieldsRegex := regexp.MustCompile(`Pad_cgo_\d+`)
|
||||||
|
b = removePaddingFieldsRegex.ReplaceAll(b, []byte("_"))
|
||||||
|
|
||||||
|
// Remove padding, hidden, or unused fields
|
||||||
|
removeFieldsRegex = regexp.MustCompile(`\b(X_\S+|Padding)`)
|
||||||
|
b = removeFieldsRegex.ReplaceAll(b, []byte("_"))
|
||||||
|
|
||||||
|
// Remove the first line of warning from cgo
|
||||||
|
b = b[bytes.IndexByte(b, '\n')+1:]
|
||||||
|
// Modify the command in the header to include:
|
||||||
|
// mkpost, our own warning, and a build tag.
|
||||||
|
replacement := fmt.Sprintf(`$1 | go run mkpost.go
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s,%s`, goarch, goos)
|
||||||
|
cgoCommandRegex := regexp.MustCompile(`(cgo -godefs .*)`)
|
||||||
|
b = cgoCommandRegex.ReplaceAll(b, []byte(replacement))
|
||||||
|
|
||||||
|
// Rename Stat_t time fields
|
||||||
|
if goos == "freebsd" && goarch == "386" {
|
||||||
|
// Hide Stat_t.[AMCB]tim_ext fields
|
||||||
|
renameStatTimeExtFieldsRegex := regexp.MustCompile(`[AMCB]tim_ext`)
|
||||||
|
b = renameStatTimeExtFieldsRegex.ReplaceAll(b, []byte("_"))
|
||||||
|
}
|
||||||
|
renameStatTimeFieldsRegex := regexp.MustCompile(`([AMCB])(?:irth)?time?(?:spec)?\s+(Timespec|StTimespec)`)
|
||||||
|
b = renameStatTimeFieldsRegex.ReplaceAll(b, []byte("${1}tim ${2}"))
|
||||||
|
|
||||||
|
// gofmt
|
||||||
|
b, err = format.Source(b)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
os.Stdout.Write(b)
|
||||||
|
}
|
|
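As a concrete illustration of the rewrites above (not part of this change), the Fsid/Sigset_t regex turns the cgo-generated private field into an exported one:

    package sketch

    import (
    	"fmt"
    	"regexp"
    )

    // exampleValRewrite demonstrates the Fsid/Sigset_t rewrite mkpost.go
    // applies: the private X__val field is renamed to Val, the rest kept.
    func exampleValRewrite() {
    	valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__(bits|val)(\s+\S+\s+)}`)
    	in := []byte("type Fsid struct {\n\tX__val [2]int32\n}")
    	out := valRegex.ReplaceAll(in, []byte("type $1 struct {${2}Val$4}"))
    	fmt.Println(string(out)) // prints: type Fsid struct { Val [2]int32 } across three lines
    }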
@@ -0,0 +1,407 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_darwin.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named errno.
|
||||||
|
|
||||||
|
A line beginning with //sysnb is like //sys, except that the
|
||||||
|
goroutine will not be suspended during the execution of the system
|
||||||
|
call. This must only be used for system calls which can never
|
||||||
|
block, as otherwise the system call could cause all goroutines to
|
||||||
|
hang.
|
||||||
|
*/
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
plan9 = flag.Bool("plan9", false, "plan9")
|
||||||
|
openbsd = flag.Bool("openbsd", false, "openbsd")
|
||||||
|
netbsd = flag.Bool("netbsd", false, "netbsd")
|
||||||
|
dragonfly = flag.Bool("dragonfly", false, "dragonfly")
|
||||||
|
arm = flag.Bool("arm", false, "arm") // 64-bit value should use (even, odd)-pair
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
filename = flag.String("output", "", "output file name (standard output if omitted)")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS and architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goos := os.Getenv("GOOS")
|
||||||
|
if goos == "" {
|
||||||
|
fmt.Fprintln(os.Stderr, "GOOS not defined in environment")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
goarch := os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we are using the Docker-based build system if we should
|
||||||
|
if goos == "linux" {
|
||||||
|
if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
|
||||||
|
fmt.Fprintf(os.Stderr, "In the Docker-based build system, mksyscall should not be called directly.\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "See README.md\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
libc := false
|
||||||
|
if goos == "darwin" && strings.Contains(buildTags(), ",go1.12") {
|
||||||
|
libc = true
|
||||||
|
}
|
||||||
|
trampolines := map[string]bool{}
|
||||||
|
|
||||||
|
text := ""
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, errno error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, sysname := f[2], f[3], f[4], f[5]
|
||||||
|
|
||||||
|
// ClockGettime doesn't have a syscall number on Darwin, only generate libc wrappers.
|
||||||
|
if goos == "darwin" && !libc && funct == "ClockGettime" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
outDecl := ""
|
||||||
|
if len(out) > 0 {
|
||||||
|
outDecl = fmt.Sprintf(" (%s)", strings.Join(out, ", "))
|
||||||
|
}
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outDecl)
|
||||||
|
|
||||||
|
// Check if err return available
|
||||||
|
errvar := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *byte\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d, %s = BytePtrFromString(%s)\n", n, errvar, p.Name)
|
||||||
|
text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *byte\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d, _ = BytePtrFromString(%s)\n", n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass dummy pointer in that case.
|
||||||
|
// Used to pass nil, but some OSes or simulators reject write(fd, nil, 0).
|
||||||
|
text += fmt.Sprintf("\tvar _p%d unsafe.Pointer\n", n)
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = unsafe.Pointer(&%s[0])\n\t}", p.Name, n, p.Name)
|
||||||
|
text += fmt.Sprintf(" else {\n\t\t_p%d = unsafe.Pointer(&_zero)\n\t}\n", n)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(_p%d)", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && (*openbsd || *netbsd) {
|
||||||
|
args = append(args, "0")
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else if endianness == "little-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int64" && *dragonfly {
|
||||||
|
if regexp.MustCompile(`^(?i)extp(read|write)`).FindStringSubmatch(funct) == nil {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else if endianness == "little-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if (p.Type == "int64" || p.Type == "uint64") && endianness != "" {
|
||||||
|
if len(args)%2 == 1 && *arm {
|
||||||
|
// arm abi specifies 64-bit argument uses
|
||||||
|
// (even, odd) pair
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which form to use; pad args with zeros.
|
||||||
|
asm := "Syscall"
|
||||||
|
if nonblock != nil {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
asm = "RawSyscallNoError"
|
||||||
|
} else {
|
||||||
|
asm = "RawSyscall"
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
asm = "SyscallNoError"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(args) <= 3 {
|
||||||
|
for len(args) < 3 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else if len(args) <= 6 {
|
||||||
|
asm += "6"
|
||||||
|
for len(args) < 6 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else if len(args) <= 9 {
|
||||||
|
asm += "9"
|
||||||
|
for len(args) < 9 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s too many arguments to system call\n", path, funct)
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call number.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = "SYS_" + funct
|
||||||
|
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToUpper(sysname)
|
||||||
|
}
|
||||||
|
|
||||||
|
var libcFn string
|
||||||
|
if libc {
|
||||||
|
asm = "syscall_" + strings.ToLower(asm[:1]) + asm[1:] // internal syscall call
|
||||||
|
sysname = strings.TrimPrefix(sysname, "SYS_") // remove SYS_
|
||||||
|
sysname = strings.ToLower(sysname) // lowercase
|
||||||
|
if sysname == "getdirentries64" {
|
||||||
|
// Special case - libSystem name and
|
||||||
|
// raw syscall name don't match.
|
||||||
|
sysname = "__getdirentries64"
|
||||||
|
}
|
||||||
|
libcFn = sysname
|
||||||
|
sysname = "funcPC(libc_" + sysname + "_trampoline)"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := fmt.Sprintf("%s(%s, %s)", asm, sysname, arglist)
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
ret := []string{"_", "_", "_"}
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" && !*plan9 {
|
||||||
|
reg = "e1"
|
||||||
|
ret[2] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else if p.Name == "err" && *plan9 {
|
||||||
|
ret[0] = "r0"
|
||||||
|
ret[2] = "e1"
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%s != 0", reg)
|
||||||
|
}
|
||||||
|
if p.Type == "int64" && endianness != "" {
|
||||||
|
// 64-bit number in r1:r0 or r0:r1.
|
||||||
|
if i+2 > len(out) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s not enough registers for int64 return\n", path, funct)
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
|
||||||
|
}
|
||||||
|
ret[i] = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i+1] = fmt.Sprintf("r%d", i+1)
|
||||||
|
}
|
||||||
|
if reg != "e1" || *plan9 {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
|
||||||
|
text += fmt.Sprintf("\t%s\n", call)
|
||||||
|
} else {
|
||||||
|
if errvar == "" && goos == "linux" {
|
||||||
|
// raw syscall without error on Linux, see golang.org/issue/22924
|
||||||
|
text += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], call)
|
||||||
|
} else {
|
||||||
|
text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
text += body
|
||||||
|
|
||||||
|
if *plan9 && ret[2] == "e1" {
|
||||||
|
text += "\tif int32(r0) == -1 {\n"
|
||||||
|
text += "\t\terr = e1\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
} else if doErrno {
|
||||||
|
text += "\tif e1 != 0 {\n"
|
||||||
|
text += "\t\terr = errnoErr(e1)\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
}
|
||||||
|
text += "\treturn\n"
|
||||||
|
text += "}\n\n"
|
||||||
|
|
||||||
|
if libc && !trampolines[libcFn] {
|
||||||
|
// some system calls share a trampoline, like read and readlen.
|
||||||
|
trampolines[libcFn] = true
|
||||||
|
// Declare assembly trampoline.
|
||||||
|
text += fmt.Sprintf("func libc_%s_trampoline()\n", libcFn)
|
||||||
|
// Assembly trampoline calls the libc_* function, which this magic
|
||||||
|
// redirects to use the function from libSystem.
|
||||||
|
text += fmt.Sprintf("//go:linkname libc_%s libc_%s\n", libcFn, libcFn)
|
||||||
|
text += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"/usr/lib/libSystem.B.dylib\"\n", libcFn, libcFn)
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ syscall.Errno
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
|
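To make the //sys prototype format above concrete: the sketch below runs the same declaration regex that mksyscall.go uses against an example line and shows how it splits into function name, input parameters, and output parameters. The Open prototype is illustrative only, not taken from a real syscall file:

    package sketch

    import (
    	"fmt"
    	"regexp"
    )

    // exampleSysLine shows how the //sys prototype regex splits a line into
    // function name, input params, and output params.
    func exampleSysLine() {
    	re := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`)
    	line := "//sys Open(path string, mode int, perm int) (fd int, err error)"
    	f := re.FindStringSubmatch(line)
    	fmt.Println(f[2]) // Open
    	fmt.Println(f[3]) // path string, mode int, perm int
    	fmt.Println(f[4]) // fd int, err error
    }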
@@ -0,0 +1,415 @@
|
||||||
|
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_aix.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named err.
|
||||||
|
* If go func name needs to be different than its libc name,
|
||||||
|
* or the function is not in libc, name could be specified
|
||||||
|
* at the end, after "=" sign, like
|
||||||
|
//sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
|
||||||
|
*/
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
aix = flag.Bool("aix", false, "aix")
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall_aix_ppc.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
text := ""
|
||||||
|
cExtern := "/*\n#include <stdint.h>\n#include <stddef.h>\n"
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// Check if value return, err return available
|
||||||
|
errvar := ""
|
||||||
|
retvar := ""
|
||||||
|
rettype := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
} else {
|
||||||
|
retvar = p.Name
|
||||||
|
rettype = p.Type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call name.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
cRettype := ""
|
||||||
|
if rettype == "unsafe.Pointer" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "uintptr" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "int" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int32" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int64" {
|
||||||
|
cRettype = "long long"
|
||||||
|
} else if rettype == "uint32" {
|
||||||
|
cRettype = "unsigned int"
|
||||||
|
} else if rettype == "uint64" {
|
||||||
|
cRettype = "unsigned long long"
|
||||||
|
} else {
|
||||||
|
cRettype = "int"
|
||||||
|
}
|
||||||
|
if sysname == "exit" {
|
||||||
|
cRettype = "void"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Change p.Types to c
|
||||||
|
var cIn []string
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t", "size_t")
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
cIn = append(cIn, "long long")
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
cIn = append(cIn, "unsigned int")
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
cIn = append(cIn, "unsigned long long")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if funct != "fcntl" && funct != "FcntlInt" && funct != "readlen" && funct != "writelen" {
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
cExtern += "#define c_select select\n"
|
||||||
|
}
|
||||||
|
// Imports of system calls from libc
|
||||||
|
cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
|
||||||
|
cIn := strings.Join(cIn, ", ")
|
||||||
|
cExtern += fmt.Sprintf("(%s);\n", cIn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// So file name.
|
||||||
|
if *aix {
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc.a/shr_64.o"
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
strconvfunc := "C.CString"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
if outps != "" {
|
||||||
|
outps = fmt.Sprintf(" (%s)", outps)
|
||||||
|
}
|
||||||
|
if text != "" {
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
argN := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "C.uintptr_t(uintptr(unsafe.Pointer("+p.Name+")))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(unsafe.Pointer(_p%d)))", n))
|
||||||
|
n++
|
||||||
|
text += fmt.Sprintf("\tvar _p%d int\n", n)
|
||||||
|
text += fmt.Sprintf("\t_p%d = len(%s)\n", n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("C.size_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
n++
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d uint32\n", n)
|
||||||
|
text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
|
||||||
|
args = append(args, fmt.Sprintf("_p%d", n))
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (argN == 2) && ((funct == "readlen") || (funct == "writelen")) {
|
||||||
|
args = append(args, fmt.Sprintf("C.size_t(%s)", p.Name))
|
||||||
|
} else if argN == 0 && funct == "fcntl" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if (argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt")) {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
args = append(args, fmt.Sprintf("C.longlong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uint(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
args = append(args, fmt.Sprintf("C.ulonglong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
argN++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := ""
|
||||||
|
if sysname == "exit" {
|
||||||
|
if errvar != "" {
|
||||||
|
call += "er :="
|
||||||
|
} else {
|
||||||
|
call += ""
|
||||||
|
}
|
||||||
|
} else if errvar != "" {
|
||||||
|
call += "r0,er :="
|
||||||
|
} else if retvar != "" {
|
||||||
|
call += "r0,_ :="
|
||||||
|
} else {
|
||||||
|
call += ""
|
||||||
|
}
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
call += fmt.Sprintf("C.c_%s(%s)", sysname, arglist)
|
||||||
|
} else {
|
||||||
|
call += fmt.Sprintf("C.%s(%s)", sysname, arglist)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
} else {
|
||||||
|
reg = "r0"
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// verify return
|
||||||
|
if sysname != "exit" && errvar != "" {
|
||||||
|
if regexp.MustCompile(`^uintptr`).FindStringSubmatch(cRettype) != nil {
|
||||||
|
body += "\tif (uintptr(r0) ==^uintptr(0) && er != nil) {\n"
|
||||||
|
body += fmt.Sprintf("\t\t%s = er\n", errvar)
|
||||||
|
body += "\t}\n"
|
||||||
|
} else {
|
||||||
|
body += "\tif (r0 ==-1 && er != nil) {\n"
|
||||||
|
body += fmt.Sprintf("\t\t%s = er\n", errvar)
|
||||||
|
body += "\t}\n"
|
||||||
|
}
|
||||||
|
} else if errvar != "" {
|
||||||
|
body += "\tif (er != nil) {\n"
|
||||||
|
body += fmt.Sprintf("\t\t%s = er\n", errvar)
|
||||||
|
body += "\t}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
text += fmt.Sprintf("\t%s\n", call)
|
||||||
|
text += body
|
||||||
|
|
||||||
|
text += "\treturn\n"
|
||||||
|
text += "}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, cExtern, imp, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
|
@@ -0,0 +1,614 @@
|
||||||
|
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_aix.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named err.
|
||||||
|
* If go func name needs to be different than its libc name,
|
||||||
|
* or the function is not in libc, name could be specified
|
||||||
|
* at the end, after "=" sign, like
|
||||||
|
//sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
|
||||||
|
|
||||||
|
|
||||||
|
This program will generate three files and handle both the gc and gccgo implementations:
|
||||||
|
- zsyscall_aix_ppc64.go: the common part of each implementation (error handler, pointer creation)
|
||||||
|
- zsyscall_aix_ppc64_gc.go: gc part with //go_cgo_import_dynamic and a call to syscall6
|
||||||
|
- zsyscall_aix_ppc64_gccgo.go: gccgo part with C function and conversion to C type.
|
||||||
|
|
||||||
|
The generated code looks like this
|
||||||
|
|
||||||
|
zsyscall_aix_ppc64.go
|
||||||
|
func asyscall(...) (n int, err error) {
|
||||||
|
// Pointer Creation
|
||||||
|
r1, e1 := callasyscall(...)
|
||||||
|
// Type Conversion
|
||||||
|
// Error Handler
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
zsyscall_aix_ppc64_gc.go
|
||||||
|
//go:cgo_import_dynamic libc_asyscall asyscall "libc.a/shr_64.o"
|
||||||
|
//go:linkname libc_asyscall libc_asyscall
|
||||||
|
var asyscall syscallFunc
|
||||||
|
|
||||||
|
func callasyscall(...) (r1 uintptr, e1 Errno) {
|
||||||
|
r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_asyscall)), "nb_args", ... )
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
zsyscall_aix_ppc64_gccgo.go
|
||||||
|
|
||||||
|
// int asyscall(...)
|
||||||
|
|
||||||
|
import "C"
|
||||||
|
|
||||||
|
func callasyscall(...) (r1 uintptr, e1 Errno) {
|
||||||
|
r1 = uintptr(C.asyscall(...))
|
||||||
|
e1 = syscall.GetErrno()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
aix = flag.Bool("aix", false, "aix")
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall_aix_ppc64.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc64.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
// GCCGO
|
||||||
|
textgccgo := ""
|
||||||
|
cExtern := "/*\n#include <stdint.h>\n"
|
||||||
|
// GC
|
||||||
|
textgc := ""
|
||||||
|
dynimports := ""
|
||||||
|
linknames := ""
|
||||||
|
var vars []string
|
||||||
|
// COMMON
|
||||||
|
textcommon := ""
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
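// Capture groups (assumed reading of the regexp below): 1 = optional "nb",
// 2 = function name, 3 = input params, 4 = output params,
// 5 = module name (before the "."), 6 = libc name.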
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
|
||||||
|
onlyCommon := false
|
||||||
|
if funct == "readlen" || funct == "writelen" || funct == "FcntlInt" || funct == "FcntlFlock" {
|
||||||
|
// This function calls another syscall that is already implemented.
// Therefore, the gc and gccgo parts must not be generated.
|
||||||
|
onlyCommon = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
|
||||||
|
textcommon += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
if !onlyCommon {
|
||||||
|
textgccgo += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
textgc += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if value return, err return available
|
||||||
|
errvar := ""
|
||||||
|
rettype := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
} else {
|
||||||
|
rettype = p.Type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
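// Insert "_" at every lower-to-upper transition so a CamelCase Go name maps
// to the snake_case libc convention (illustratively, GetClock -> get_clock
// after the ToLower below; GetClock is only a hypothetical name).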
sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
// GCCGO Prototype return type
|
||||||
|
cRettype := ""
|
||||||
|
if rettype == "unsafe.Pointer" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "uintptr" {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
|
||||||
|
cRettype = "uintptr_t"
|
||||||
|
} else if rettype == "int" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int32" {
|
||||||
|
cRettype = "int"
|
||||||
|
} else if rettype == "int64" {
|
||||||
|
cRettype = "long long"
|
||||||
|
} else if rettype == "uint32" {
|
||||||
|
cRettype = "unsigned int"
|
||||||
|
} else if rettype == "uint64" {
|
||||||
|
cRettype = "unsigned long long"
|
||||||
|
} else {
|
||||||
|
cRettype = "int"
|
||||||
|
}
|
||||||
|
if sysname == "exit" {
|
||||||
|
cRettype = "void"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GCCGO Prototype arguments type
|
||||||
|
var cIn []string
|
||||||
|
for i, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t", "size_t")
|
||||||
|
} else if p.Type == "unsafe.Pointer" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (i == 0 || i == 2) && funct == "fcntl" {
|
||||||
|
// These fcntl arguments need to be uintptr to be able to call FcntlInt and FcntlFlock
|
||||||
|
cIn = append(cIn, "uintptr_t")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
cIn = append(cIn, "long long")
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
cIn = append(cIn, "unsigned int")
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
cIn = append(cIn, "unsigned long long")
|
||||||
|
} else {
|
||||||
|
cIn = append(cIn, "int")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !onlyCommon {
|
||||||
|
// GCCGO Prototype Generation
|
||||||
|
// Imports of system calls from libc
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
cExtern += "#define c_select select\n"
|
||||||
|
}
|
||||||
|
cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
|
||||||
|
cIn := strings.Join(cIn, ", ")
|
||||||
|
cExtern += fmt.Sprintf("(%s);\n", cIn)
|
||||||
|
}
|
||||||
|
// GC Library name
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc.a/shr_64.o"
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
sysvarname := fmt.Sprintf("libc_%s", sysname)
|
||||||
|
|
||||||
|
if !onlyCommon {
|
||||||
|
// GC Runtime import of function to allow cross-platform builds.
|
||||||
|
dynimports += fmt.Sprintf("//go:cgo_import_dynamic %s %s \"%s\"\n", sysvarname, sysname, modname)
|
||||||
|
// GC Link symbol to proc address variable.
|
||||||
|
linknames += fmt.Sprintf("//go:linkname %s %s\n", sysvarname, sysvarname)
|
||||||
|
// GC Library proc address variable.
|
||||||
|
vars = append(vars, sysvarname)
|
||||||
|
}
|
||||||
|
|
||||||
|
strconvfunc := "BytePtrFromString"
|
||||||
|
strconvtype := "*byte"
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
if outps != "" {
|
||||||
|
outps = fmt.Sprintf(" (%s)", outps)
|
||||||
|
}
|
||||||
|
if textcommon != "" {
|
||||||
|
textcommon += "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
textcommon += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
|
||||||
|
|
||||||
|
// Prepare arguments to call.
|
||||||
|
var argscommon []string // Arguments in the common part
|
||||||
|
var argscall []string // Arguments for call prototype
|
||||||
|
var argsgc []string // Arguments for gc call (with syscall6)
|
||||||
|
var argsgccgo []string // Arguments for gccgo call (with C.name_of_syscall)
|
||||||
|
n := 0
|
||||||
|
argN := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(%s))", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr ", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
textcommon += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
|
||||||
|
textcommon += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("len(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n), fmt.Sprintf("_lenp%d int", n))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("_p%d", n), fmt.Sprintf("uintptr(_lenp%d)", n))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n), fmt.Sprintf("C.size_t(_lenp%d)", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses int64 with 32 bits mode. Case not yet implemented\n")
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses bool. Case not yet implemented\n")
|
||||||
|
} else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil || p.Type == "unsafe.Pointer" {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else if p.Type == "int" {
|
||||||
|
if (argN == 0 || argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt") || (funct == "FcntlFlock")) {
|
||||||
|
// These fcntl arguments need to be uintptr to be able to call FcntlInt and FcntlFlock
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
|
||||||
|
} else {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "int32" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int32", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
} else if p.Type == "int64" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int64", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.longlong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint32" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uint32", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uint(%s)", p.Name))
|
||||||
|
} else if p.Type == "uint64" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uint64", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.ulonglong(%s)", p.Name))
|
||||||
|
} else if p.Type == "uintptr" {
|
||||||
|
argscommon = append(argscommon, p.Name)
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
|
||||||
|
argsgc = append(argsgc, p.Name)
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
argscommon = append(argscommon, fmt.Sprintf("int(%s)", p.Name))
|
||||||
|
argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
|
||||||
|
argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
|
||||||
|
}
|
||||||
|
argN++
|
||||||
|
}
|
||||||
|
nargs := len(argsgc)
|
||||||
|
|
||||||
|
// COMMON function generation
|
||||||
|
argscommonlist := strings.Join(argscommon, ", ")
|
||||||
|
callcommon := fmt.Sprintf("call%s(%s)", sysname, argscommonlist)
|
||||||
|
ret := []string{"_", "_"}
|
||||||
|
body := ""
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
ret[1] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else {
|
||||||
|
reg = "r0"
|
||||||
|
ret[0] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%s != 0", reg)
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" {
|
||||||
|
textcommon += fmt.Sprintf("\t%s\n", callcommon)
|
||||||
|
} else {
|
||||||
|
textcommon += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], callcommon)
|
||||||
|
}
|
||||||
|
textcommon += body
|
||||||
|
|
||||||
|
if doErrno {
|
||||||
|
textcommon += "\tif e1 != 0 {\n"
|
||||||
|
textcommon += "\t\terr = errnoErr(e1)\n"
|
||||||
|
textcommon += "\t}\n"
|
||||||
|
}
|
||||||
|
textcommon += "\treturn\n"
|
||||||
|
textcommon += "}\n"
|
||||||
|
|
||||||
|
if onlyCommon {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// CALL Prototype
|
||||||
|
callProto := fmt.Sprintf("func call%s(%s) (r1 uintptr, e1 Errno) {\n", sysname, strings.Join(argscall, ", "))
|
||||||
|
|
||||||
|
// GC function generation
|
||||||
|
asm := "syscall6"
|
||||||
|
if nonblock != nil {
|
||||||
|
asm = "rawSyscall6"
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(argsgc) <= 6 {
|
||||||
|
for len(argsgc) < 6 {
|
||||||
|
argsgc = append(argsgc, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: too many arguments to system call", funct)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
argsgclist := strings.Join(argsgc, ", ")
|
||||||
|
callgc := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, argsgclist)
|
||||||
|
|
||||||
|
textgc += callProto
|
||||||
|
textgc += fmt.Sprintf("\tr1, _, e1 = %s\n", callgc)
|
||||||
|
textgc += "\treturn\n}\n"
|
||||||
|
|
||||||
|
// GCCGO function generation
|
||||||
|
argsgccgolist := strings.Join(argsgccgo, ", ")
|
||||||
|
var callgccgo string
|
||||||
|
if sysname == "select" {
|
||||||
|
// select is a keyword of Go. Its name is
|
||||||
|
// changed to c_select.
|
||||||
|
callgccgo = fmt.Sprintf("C.c_%s(%s)", sysname, argsgccgolist)
|
||||||
|
} else {
|
||||||
|
callgccgo = fmt.Sprintf("C.%s(%s)", sysname, argsgccgolist)
|
||||||
|
}
|
||||||
|
textgccgo += callProto
|
||||||
|
textgccgo += fmt.Sprintf("\tr1 = uintptr(%s)\n", callgccgo)
|
||||||
|
textgccgo += "\te1 = syscall.GetErrno()\n"
|
||||||
|
textgccgo += "\treturn\n}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64.go
|
||||||
|
err := ioutil.WriteFile("zsyscall_aix_ppc64.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate1, cmdLine(), buildTags(), pack, imp, textcommon)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64_gc.go
|
||||||
|
vardecls := "\t" + strings.Join(vars, ",\n\t")
|
||||||
|
vardecls += " syscallFunc"
|
||||||
|
err = ioutil.WriteFile("zsyscall_aix_ppc64_gc.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate2, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, textgc)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print zsyscall_aix_ppc64_gccgo.go
|
||||||
|
err = ioutil.WriteFile("zsyscall_aix_ppc64_gccgo.go",
|
||||||
|
[]byte(fmt.Sprintf(srcTemplate3, cmdLine(), buildTags(), pack, cExtern, imp, textgccgo)),
|
||||||
|
0644)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate1 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
||||||
|
const srcTemplate2 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
// +build !gccgo
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
type syscallFunc uintptr
|
||||||
|
|
||||||
|
var (
|
||||||
|
%s
|
||||||
|
)
|
||||||
|
|
||||||
|
// Implemented in runtime/syscall_aix.go.
|
||||||
|
func rawSyscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
|
||||||
|
func syscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
||||||
|
const srcTemplate3 = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
// +build gccgo
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
%s
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
%s
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
|
@ -0,0 +1,335 @@
|
||||||
|
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
/*
|
||||||
|
This program reads a file containing function prototypes
|
||||||
|
(like syscall_solaris.go) and generates system call bodies.
|
||||||
|
The prototypes are marked by lines beginning with "//sys"
|
||||||
|
and read like func declarations if //sys is replaced by func, but:
|
||||||
|
* The parameter lists must give a name for each argument.
|
||||||
|
This includes return parameters.
|
||||||
|
* The parameter lists must give a type for each argument:
|
||||||
|
the (x, y, z int) shorthand is not allowed.
|
||||||
|
* If the return parameter is an error number, it must be named err.
|
||||||
|
* If the go func name needs to be different from its libc name,
* or the function is not in libc, the name can be specified
* at the end, after the "=" sign, like
|
||||||
|
//sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
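
For illustration, tracing that prototype through the generator below yields
a wrapper along these lines (a hand-derived sketch, not the checked-in
output):

func getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) {
	_, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procgetsockopt)), 5, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0)
	if e1 != 0 {
		err = e1
	}
	return
}

together with a //go:cgo_import_dynamic line that loads getsockopt from
libsocket.so and a procgetsockopt variable holding its address.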
|
||||||
|
*/
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
b32 = flag.Bool("b32", false, "32bit big-endian")
|
||||||
|
l32 = flag.Bool("l32", false, "32bit little-endian")
|
||||||
|
tags = flag.String("tags", "", "build tags")
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksyscall_solaris.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return *tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Param is a function parameter
|
||||||
|
type Param struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
}
|
||||||
|
|
||||||
|
// usage prints the program usage
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: go run mksyscall_solaris.go [-b32 | -l32] [-tags x,y] [file ...]\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParamList parses a parameter list and returns a slice of parameters
|
||||||
|
func parseParamList(list string) []string {
|
||||||
|
list = strings.TrimSpace(list)
|
||||||
|
if list == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseParam splits a parameter into name and type
|
||||||
|
func parseParam(p string) Param {
|
||||||
|
ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
|
||||||
|
if ps == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return Param{ps[1], ps[2]}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(flag.Args()) <= 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "no files to parse provided\n")
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
endianness := ""
|
||||||
|
if *b32 {
|
||||||
|
endianness = "big-endian"
|
||||||
|
} else if *l32 {
|
||||||
|
endianness = "little-endian"
|
||||||
|
}
|
||||||
|
|
||||||
|
pack := ""
|
||||||
|
text := ""
|
||||||
|
dynimports := ""
|
||||||
|
linknames := ""
|
||||||
|
var vars []string
|
||||||
|
for _, path := range flag.Args() {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
t := s.Text()
|
||||||
|
t = strings.TrimSpace(t)
|
||||||
|
t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
|
||||||
|
if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
|
||||||
|
pack = p[1]
|
||||||
|
}
|
||||||
|
nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
|
||||||
|
if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line must be of the form
|
||||||
|
// func Open(path string, mode int, perm int) (fd int, err error)
|
||||||
|
// Split into name, in params, out params.
|
||||||
|
f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
|
||||||
|
if f == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
|
||||||
|
|
||||||
|
// Split argument lists on comma.
|
||||||
|
in := parseParamList(inps)
|
||||||
|
out := parseParamList(outps)
|
||||||
|
|
||||||
|
inps = strings.Join(in, ", ")
|
||||||
|
outps = strings.Join(out, ", ")
|
||||||
|
|
||||||
|
// Try in vain to keep people from editing this file.
|
||||||
|
// The theory is that they jump into the middle of the file
|
||||||
|
// without reading the header.
|
||||||
|
text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
|
||||||
|
|
||||||
|
// Shared object (.so) file name.
|
||||||
|
if modname == "" {
|
||||||
|
modname = "libc"
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call name.
|
||||||
|
if sysname == "" {
|
||||||
|
sysname = funct
|
||||||
|
}
|
||||||
|
|
||||||
|
// System call pointer variable name.
|
||||||
|
sysvarname := fmt.Sprintf("proc%s", sysname)
|
||||||
|
|
||||||
|
strconvfunc := "BytePtrFromString"
|
||||||
|
strconvtype := "*byte"
|
||||||
|
|
||||||
|
sysname = strings.ToLower(sysname) // All libc functions are lowercase.
|
||||||
|
|
||||||
|
// Runtime import of function to allow cross-platform builds.
|
||||||
|
dynimports += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"%s.so\"\n", sysname, sysname, modname)
|
||||||
|
// Link symbol to proc address variable.
|
||||||
|
linknames += fmt.Sprintf("//go:linkname %s libc_%s\n", sysvarname, sysname)
|
||||||
|
// Library proc address variable.
|
||||||
|
vars = append(vars, sysvarname)
|
||||||
|
|
||||||
|
// Go function header.
|
||||||
|
outlist := strings.Join(out, ", ")
|
||||||
|
if outlist != "" {
|
||||||
|
outlist = fmt.Sprintf(" (%s)", outlist)
|
||||||
|
}
|
||||||
|
if text != "" {
|
||||||
|
text += "\n"
|
||||||
|
}
|
||||||
|
text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outlist)
|
||||||
|
|
||||||
|
// Check if err return available
|
||||||
|
errvar := ""
|
||||||
|
for _, param := range out {
|
||||||
|
p := parseParam(param)
|
||||||
|
if p.Type == "error" {
|
||||||
|
errvar = p.Name
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare arguments to Syscall.
|
||||||
|
var args []string
|
||||||
|
n := 0
|
||||||
|
for _, param := range in {
|
||||||
|
p := parseParam(param)
|
||||||
|
if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
|
||||||
|
args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
|
||||||
|
} else if p.Type == "string" && errvar != "" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
text += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
|
||||||
|
text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "string" {
|
||||||
|
fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
|
||||||
|
text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
|
||||||
|
text += fmt.Sprintf("\t_p%d, _ = %s(%s)\n", n, strconvfunc, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
|
||||||
|
n++
|
||||||
|
} else if s := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); s != nil {
|
||||||
|
// Convert slice into pointer, length.
|
||||||
|
// Have to be careful not to take address of &a[0] if len == 0:
|
||||||
|
// pass nil in that case.
|
||||||
|
text += fmt.Sprintf("\tvar _p%d *%s\n", n, s[1])
|
||||||
|
text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
|
||||||
|
n++
|
||||||
|
} else if p.Type == "int64" && endianness != "" {
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
|
||||||
|
}
|
||||||
|
} else if p.Type == "bool" {
|
||||||
|
text += fmt.Sprintf("\tvar _p%d uint32\n", n)
|
||||||
|
text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(_p%d)", n))
|
||||||
|
n++
|
||||||
|
} else {
|
||||||
|
args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
nargs := len(args)
|
||||||
|
|
||||||
|
// Determine which form to use; pad args with zeros.
|
||||||
|
asm := "sysvicall6"
|
||||||
|
if nonblock != nil {
|
||||||
|
asm = "rawSysvicall6"
|
||||||
|
}
|
||||||
|
if len(args) <= 6 {
|
||||||
|
for len(args) < 6 {
|
||||||
|
args = append(args, "0")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: too many arguments to system call\n", path)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actual call.
|
||||||
|
arglist := strings.Join(args, ", ")
|
||||||
|
call := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, arglist)
|
||||||
|
|
||||||
|
// Assign return values.
|
||||||
|
body := ""
|
||||||
|
ret := []string{"_", "_", "_"}
|
||||||
|
doErrno := false
|
||||||
|
for i := 0; i < len(out); i++ {
|
||||||
|
p := parseParam(out[i])
|
||||||
|
reg := ""
|
||||||
|
if p.Name == "err" {
|
||||||
|
reg = "e1"
|
||||||
|
ret[2] = reg
|
||||||
|
doErrno = true
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i] = reg
|
||||||
|
}
|
||||||
|
if p.Type == "bool" {
|
||||||
|
reg = fmt.Sprintf("%d != 0", reg)
|
||||||
|
}
|
||||||
|
if p.Type == "int64" && endianness != "" {
|
||||||
|
// 64-bit number in r1:r0 or r0:r1.
|
||||||
|
if i+2 > len(out) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: not enough registers for int64 return\n", path)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if endianness == "big-endian" {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
|
||||||
|
} else {
|
||||||
|
reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
|
||||||
|
}
|
||||||
|
ret[i] = fmt.Sprintf("r%d", i)
|
||||||
|
ret[i+1] = fmt.Sprintf("r%d", i+1)
|
||||||
|
}
|
||||||
|
if reg != "e1" {
|
||||||
|
body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
|
||||||
|
text += fmt.Sprintf("\t%s\n", call)
|
||||||
|
} else {
|
||||||
|
text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
|
||||||
|
}
|
||||||
|
text += body
|
||||||
|
|
||||||
|
if doErrno {
|
||||||
|
text += "\tif e1 != 0 {\n"
|
||||||
|
text += "\t\terr = e1\n"
|
||||||
|
text += "\t}\n"
|
||||||
|
}
|
||||||
|
text += "\treturn\n"
|
||||||
|
text += "}\n"
|
||||||
|
}
|
||||||
|
if err := s.Err(); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
imp := ""
|
||||||
|
if pack != "unix" {
|
||||||
|
imp = "import \"golang.org/x/sys/unix\"\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
vardecls := "\t" + strings.Join(vars, ",\n\t")
|
||||||
|
vardecls += " syscallFunc"
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package %s
|
||||||
|
|
||||||
|
import (
|
||||||
|
"syscall"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
var (
|
||||||
|
%s
|
||||||
|
)
|
||||||
|
|
||||||
|
%s
|
||||||
|
`
|
|
@ -0,0 +1,355 @@
|
||||||
|
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// Parse the header files for OpenBSD and generate a Go usable sysctl MIB.
|
||||||
|
//
|
||||||
|
// Build a MIB with each entry being an array containing the level, type and
|
||||||
|
// a hash that will contain additional entries if the current entry is a node.
|
||||||
|
// We then walk this MIB and create a flattened sysctl name to OID hash.
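//
// For illustration only (hypothetical name and OIDs), each entry emitted into
// the generated sysctlMib table has the shape:
//
//	{ "kern.somename", []_C_int{ 1, 99 } },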
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
goos, goarch string
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments.
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksysctl_openbsd.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags.
|
||||||
|
func buildTags() string {
|
||||||
|
return fmt.Sprintf("%s,%s", goarch, goos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// reMatch performs a regular expression match and stores the substring slice in the value pointed to by m.
|
||||||
|
func reMatch(re *regexp.Regexp, str string, m *[]string) bool {
|
||||||
|
*m = re.FindStringSubmatch(str)
|
||||||
|
if *m != nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
type nodeElement struct {
	n  int                        // MIB number of this entry (OID component)
	t  string                     // CTLTYPE_* of this entry
	pE *map[string]nodeElement    // child entries if this entry is a node
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
debugEnabled bool
|
||||||
|
mib map[string]nodeElement
|
||||||
|
node *map[string]nodeElement
|
||||||
|
nodeMap map[string]string
|
||||||
|
sysCtl []string
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ctlNames1RE = regexp.MustCompile(`^#define\s+(CTL_NAMES)\s+{`)
|
||||||
|
ctlNames2RE = regexp.MustCompile(`^#define\s+(CTL_(.*)_NAMES)\s+{`)
|
||||||
|
ctlNames3RE = regexp.MustCompile(`^#define\s+((.*)CTL_NAMES)\s+{`)
|
||||||
|
netInetRE = regexp.MustCompile(`^netinet/`)
|
||||||
|
netInet6RE = regexp.MustCompile(`^netinet6/`)
|
||||||
|
netRE = regexp.MustCompile(`^net/`)
|
||||||
|
bracesRE = regexp.MustCompile(`{.*}`)
|
||||||
|
ctlTypeRE = regexp.MustCompile(`{\s+"(\w+)",\s+(CTLTYPE_[A-Z]+)\s+}`)
|
||||||
|
fsNetKernRE = regexp.MustCompile(`^(fs|net|kern)_`)
|
||||||
|
)
|
||||||
|
|
||||||
|
func debug(s string) {
|
||||||
|
if debugEnabled {
|
||||||
|
fmt.Fprintln(os.Stderr, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk the MIB and build a sysctl name to OID mapping.
|
||||||
|
func buildSysctl(pNode *map[string]nodeElement, name string, oid []int) {
|
||||||
|
lNode := pNode // local copy of pointer to node
|
||||||
|
var keys []string
|
||||||
|
for k := range *lNode {
|
||||||
|
keys = append(keys, k)
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
|
||||||
|
for _, key := range keys {
|
||||||
|
nodename := name
|
||||||
|
if name != "" {
|
||||||
|
nodename += "."
|
||||||
|
}
|
||||||
|
nodename += key
|
||||||
|
|
||||||
|
nodeoid := append(oid, (*pNode)[key].n)
|
||||||
|
|
||||||
|
if (*pNode)[key].t == `CTLTYPE_NODE` {
|
||||||
|
if _, ok := nodeMap[nodename]; ok {
|
||||||
|
lNode = &mib
|
||||||
|
ctlName := nodeMap[nodename]
|
||||||
|
for _, part := range strings.Split(ctlName, ".") {
|
||||||
|
lNode = ((*lNode)[part]).pE
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
lNode = (*pNode)[key].pE
|
||||||
|
}
|
||||||
|
buildSysctl(lNode, nodename, nodeoid)
|
||||||
|
} else if (*pNode)[key].t != "" {
|
||||||
|
oidStr := []string{}
|
||||||
|
for j := range nodeoid {
|
||||||
|
oidStr = append(oidStr, fmt.Sprintf("%d", nodeoid[j]))
|
||||||
|
}
|
||||||
|
text := "\t{ \"" + nodename + "\", []_C_int{ " + strings.Join(oidStr, ", ") + " } }, \n"
|
||||||
|
sysCtl = append(sysCtl, text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS (using GOOS_TARGET if it exists)
|
||||||
|
goos = os.Getenv("GOOS_TARGET")
|
||||||
|
if goos == "" {
|
||||||
|
goos = os.Getenv("GOOS")
|
||||||
|
}
|
||||||
|
// Get the architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goarch = os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
// Check if GOOS and GOARCH environment variables are defined
|
||||||
|
if goarch == "" || goos == "" {
|
||||||
|
fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
mib = make(map[string]nodeElement)
|
||||||
|
headers := [...]string{
|
||||||
|
`sys/sysctl.h`,
|
||||||
|
`sys/socket.h`,
|
||||||
|
`sys/tty.h`,
|
||||||
|
`sys/malloc.h`,
|
||||||
|
`sys/mount.h`,
|
||||||
|
`sys/namei.h`,
|
||||||
|
`sys/sem.h`,
|
||||||
|
`sys/shm.h`,
|
||||||
|
`sys/vmmeter.h`,
|
||||||
|
`uvm/uvmexp.h`,
|
||||||
|
`uvm/uvm_param.h`,
|
||||||
|
`uvm/uvm_swap_encrypt.h`,
|
||||||
|
`ddb/db_var.h`,
|
||||||
|
`net/if.h`,
|
||||||
|
`net/if_pfsync.h`,
|
||||||
|
`net/pipex.h`,
|
||||||
|
`netinet/in.h`,
|
||||||
|
`netinet/icmp_var.h`,
|
||||||
|
`netinet/igmp_var.h`,
|
||||||
|
`netinet/ip_ah.h`,
|
||||||
|
`netinet/ip_carp.h`,
|
||||||
|
`netinet/ip_divert.h`,
|
||||||
|
`netinet/ip_esp.h`,
|
||||||
|
`netinet/ip_ether.h`,
|
||||||
|
`netinet/ip_gre.h`,
|
||||||
|
`netinet/ip_ipcomp.h`,
|
||||||
|
`netinet/ip_ipip.h`,
|
||||||
|
`netinet/pim_var.h`,
|
||||||
|
`netinet/tcp_var.h`,
|
||||||
|
`netinet/udp_var.h`,
|
||||||
|
`netinet6/in6.h`,
|
||||||
|
`netinet6/ip6_divert.h`,
|
||||||
|
`netinet6/pim6_var.h`,
|
||||||
|
`netinet/icmp6.h`,
|
||||||
|
`netmpls/mpls.h`,
|
||||||
|
}
|
||||||
|
|
||||||
|
ctls := [...]string{
|
||||||
|
`kern`,
|
||||||
|
`vm`,
|
||||||
|
`fs`,
|
||||||
|
`net`,
|
||||||
|
//debug /* Special handling required */
|
||||||
|
`hw`,
|
||||||
|
//machdep /* Arch specific */
|
||||||
|
`user`,
|
||||||
|
`ddb`,
|
||||||
|
//vfs /* Special handling required */
|
||||||
|
`fs.posix`,
|
||||||
|
`kern.forkstat`,
|
||||||
|
`kern.intrcnt`,
|
||||||
|
`kern.malloc`,
|
||||||
|
`kern.nchstats`,
|
||||||
|
`kern.seminfo`,
|
||||||
|
`kern.shminfo`,
|
||||||
|
`kern.timecounter`,
|
||||||
|
`kern.tty`,
|
||||||
|
`kern.watchdog`,
|
||||||
|
`net.bpf`,
|
||||||
|
`net.ifq`,
|
||||||
|
`net.inet`,
|
||||||
|
`net.inet.ah`,
|
||||||
|
`net.inet.carp`,
|
||||||
|
`net.inet.divert`,
|
||||||
|
`net.inet.esp`,
|
||||||
|
`net.inet.etherip`,
|
||||||
|
`net.inet.gre`,
|
||||||
|
`net.inet.icmp`,
|
||||||
|
`net.inet.igmp`,
|
||||||
|
`net.inet.ip`,
|
||||||
|
`net.inet.ip.ifq`,
|
||||||
|
`net.inet.ipcomp`,
|
||||||
|
`net.inet.ipip`,
|
||||||
|
`net.inet.mobileip`,
|
||||||
|
`net.inet.pfsync`,
|
||||||
|
`net.inet.pim`,
|
||||||
|
`net.inet.tcp`,
|
||||||
|
`net.inet.udp`,
|
||||||
|
`net.inet6`,
|
||||||
|
`net.inet6.divert`,
|
||||||
|
`net.inet6.ip6`,
|
||||||
|
`net.inet6.icmp6`,
|
||||||
|
`net.inet6.pim6`,
|
||||||
|
`net.inet6.tcp6`,
|
||||||
|
`net.inet6.udp6`,
|
||||||
|
`net.mpls`,
|
||||||
|
`net.mpls.ifq`,
|
||||||
|
`net.key`,
|
||||||
|
`net.pflow`,
|
||||||
|
`net.pfsync`,
|
||||||
|
`net.pipex`,
|
||||||
|
`net.rt`,
|
||||||
|
`vm.swapencrypt`,
|
||||||
|
//vfsgenctl /* Special handling required */
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node name "fixups"
|
||||||
|
ctlMap := map[string]string{
|
||||||
|
"ipproto": "net.inet",
|
||||||
|
"net.inet.ipproto": "net.inet",
|
||||||
|
"net.inet6.ipv6proto": "net.inet6",
|
||||||
|
"net.inet6.ipv6": "net.inet6.ip6",
|
||||||
|
"net.inet.icmpv6": "net.inet6.icmp6",
|
||||||
|
"net.inet6.divert6": "net.inet6.divert",
|
||||||
|
"net.inet6.tcp6": "net.inet.tcp",
|
||||||
|
"net.inet6.udp6": "net.inet.udp",
|
||||||
|
"mpls": "net.mpls",
|
||||||
|
"swpenc": "vm.swapencrypt",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node mappings
|
||||||
|
nodeMap = map[string]string{
|
||||||
|
"net.inet.ip.ifq": "net.ifq",
|
||||||
|
"net.inet.pfsync": "net.pfsync",
|
||||||
|
"net.mpls.ifq": "net.ifq",
|
||||||
|
}
|
||||||
|
|
||||||
|
mCtls := make(map[string]bool)
|
||||||
|
for _, ctl := range ctls {
|
||||||
|
mCtls[ctl] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, header := range headers {
|
||||||
|
debug("Processing " + header)
|
||||||
|
file, err := os.Open(filepath.Join("/usr/include", header))
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
s := bufio.NewScanner(file)
|
||||||
|
for s.Scan() {
|
||||||
|
var sub []string
|
||||||
|
if reMatch(ctlNames1RE, s.Text(), &sub) ||
|
||||||
|
reMatch(ctlNames2RE, s.Text(), &sub) ||
|
||||||
|
reMatch(ctlNames3RE, s.Text(), &sub) {
|
||||||
|
if sub[1] == `CTL_NAMES` {
|
||||||
|
// Top level.
|
||||||
|
node = &mib
|
||||||
|
} else {
|
||||||
|
// Node.
|
||||||
|
nodename := strings.ToLower(sub[2])
|
||||||
|
ctlName := ""
|
||||||
|
if reMatch(netInetRE, header, &sub) {
|
||||||
|
ctlName = "net.inet." + nodename
|
||||||
|
} else if reMatch(netInet6RE, header, &sub) {
|
||||||
|
ctlName = "net.inet6." + nodename
|
||||||
|
} else if reMatch(netRE, header, &sub) {
|
||||||
|
ctlName = "net." + nodename
|
||||||
|
} else {
|
||||||
|
ctlName = nodename
|
||||||
|
ctlName = fsNetKernRE.ReplaceAllString(ctlName, `$1.`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if val, ok := ctlMap[ctlName]; ok {
|
||||||
|
ctlName = val
|
||||||
|
}
|
||||||
|
if _, ok := mCtls[ctlName]; !ok {
|
||||||
|
debug("Ignoring " + ctlName + "...")
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk down from the top of the MIB.
|
||||||
|
node = &mib
|
||||||
|
for _, part := range strings.Split(ctlName, ".") {
|
||||||
|
if _, ok := (*node)[part]; !ok {
|
||||||
|
debug("Missing node " + part)
|
||||||
|
(*node)[part] = nodeElement{n: 0, t: "", pE: &map[string]nodeElement{}}
|
||||||
|
}
|
||||||
|
node = (*node)[part].pE
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate current node with entries.
|
||||||
|
i := -1
|
||||||
|
for !strings.HasPrefix(s.Text(), "}") {
|
||||||
|
s.Scan()
|
||||||
|
if reMatch(bracesRE, s.Text(), &sub) {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
if !reMatch(ctlTypeRE, s.Text(), &sub) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
(*node)[sub[1]] = nodeElement{n: i, t: sub[2], pE: &map[string]nodeElement{}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err = s.Err()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
file.Close()
|
||||||
|
}
|
||||||
|
buildSysctl(&mib, "", []int{})
|
||||||
|
|
||||||
|
sort.Strings(sysCtl)
|
||||||
|
text := strings.Join(sysCtl, "")
|
||||||
|
|
||||||
|
fmt.Printf(srcTemplate, cmdLine(), buildTags(), text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const srcTemplate = `// %s
|
||||||
|
// Code generated by the command above; DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
type mibentry struct {
|
||||||
|
ctlname string
|
||||||
|
ctloid []_C_int
|
||||||
|
}
|
||||||
|
|
||||||
|
var sysctlMib = []mibentry {
|
||||||
|
%s
|
||||||
|
}
|
||||||
|
`
|
|
@ -0,0 +1,190 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// Generate system call table for DragonFly, NetBSD,
|
||||||
|
// FreeBSD, OpenBSD or Darwin from master list
|
||||||
|
// (for example, /usr/src/sys/kern/syscalls.master or
|
||||||
|
// sys/syscall.h).
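//
// For illustration (Darwin input assumed; see the per-GOOS cases in main),
// a header line such as
//	#define SYS_read 3
// becomes the generated constant
//	SYS_READ = 3;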
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
goos, goarch string
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmdLine returns this program's command-line arguments
|
||||||
|
func cmdLine() string {
|
||||||
|
return "go run mksysnum.go " + strings.Join(os.Args[1:], " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTags returns build tags
|
||||||
|
func buildTags() string {
|
||||||
|
return fmt.Sprintf("%s,%s", goarch, goos)
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkErr(err error) {
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// source string and substring slice for regexp
|
||||||
|
type re struct {
|
||||||
|
str string // source string
|
||||||
|
sub []string // matched sub-string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match performs a regular expression match
|
||||||
|
func (r *re) Match(exp string) bool {
|
||||||
|
r.sub = regexp.MustCompile(exp).FindStringSubmatch(r.str)
|
||||||
|
if r.sub != nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchFile fetches a text file from URL
|
||||||
|
func fetchFile(URL string) io.Reader {
|
||||||
|
resp, err := http.Get(URL)
|
||||||
|
checkErr(err)
|
||||||
|
defer resp.Body.Close()
|
||||||
|
body, err := ioutil.ReadAll(resp.Body)
|
||||||
|
checkErr(err)
|
||||||
|
return strings.NewReader(string(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
// readFile reads a text file from path
|
||||||
|
func readFile(path string) io.Reader {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
checkErr(err)
|
||||||
|
return file
|
||||||
|
}
|
||||||
|
|
||||||
|
func format(name, num, proto string) string {
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
// There are multiple entries for enosys and nosys, so comment them out.
|
||||||
|
nm := re{str: name}
|
||||||
|
if nm.Match(`^SYS_E?NOSYS$`) {
|
||||||
|
name = fmt.Sprintf("// %s", name)
|
||||||
|
}
|
||||||
|
if name == `SYS_SYS_EXIT` {
|
||||||
|
name = `SYS_EXIT`
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Get the OS (using GOOS_TARGET if it exists)
|
||||||
|
goos = os.Getenv("GOOS_TARGET")
|
||||||
|
if goos == "" {
|
||||||
|
goos = os.Getenv("GOOS")
|
||||||
|
}
|
||||||
|
// Get the architecture (using GOARCH_TARGET if it exists)
|
||||||
|
goarch = os.Getenv("GOARCH_TARGET")
|
||||||
|
if goarch == "" {
|
||||||
|
goarch = os.Getenv("GOARCH")
|
||||||
|
}
|
||||||
|
// Check if GOOS and GOARCH environment variables are defined
|
||||||
|
if goarch == "" || goos == "" {
|
||||||
|
fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
file := strings.TrimSpace(os.Args[1])
|
||||||
|
var syscalls io.Reader
|
||||||
|
if strings.HasPrefix(file, "https://") || strings.HasPrefix(file, "http://") {
|
||||||
|
// Download syscalls.master file
|
||||||
|
syscalls = fetchFile(file)
|
||||||
|
} else {
|
||||||
|
syscalls = readFile(file)
|
||||||
|
}
|
||||||
|
|
||||||
|
var text, line string
|
||||||
|
s := bufio.NewScanner(syscalls)
|
||||||
|
for s.Scan() {
|
||||||
|
t := re{str: line}
|
||||||
|
if t.Match(`^(.*)\\$`) {
|
||||||
|
// Handle continuation
|
||||||
|
line = t.sub[1]
|
||||||
|
line += strings.TrimLeft(s.Text(), " \t")
|
||||||
|
} else {
|
||||||
|
// New line
|
||||||
|
line = s.Text()
|
||||||
|
}
|
||||||
|
t = re{str: line}
|
||||||
|
if t.Match(`\\$`) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t = re{str: line}
|
||||||
|
|
||||||
|
switch goos {
|
||||||
|
case "dragonfly":
|
||||||
|
if t.Match(`^([0-9]+)\s+STD\s+({ \S+\s+(\w+).*)$`) {
|
||||||
|
num, proto := t.sub[1], t.sub[2]
|
||||||
|
name := fmt.Sprintf("SYS_%s", t.sub[3])
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "freebsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+\S+\s+(?:(?:NO)?STD|COMPAT10)\s+({ \S+\s+(\w+).*)$`) {
|
||||||
|
num, proto := t.sub[1], t.sub[2]
|
||||||
|
name := fmt.Sprintf("SYS_%s", t.sub[3])
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "openbsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+STD\s+(NOLOCK\s+)?({ \S+\s+\*?(\w+).*)$`) {
|
||||||
|
num, proto, name := t.sub[1], t.sub[3], t.sub[4]
|
||||||
|
text += format(name, num, proto)
|
||||||
|
}
|
||||||
|
case "netbsd":
|
||||||
|
if t.Match(`^([0-9]+)\s+((STD)|(NOERR))\s+(RUMP\s+)?({\s+\S+\s*\*?\s*\|(\S+)\|(\S*)\|(\w+).*\s+})(\s+(\S+))?$`) {
|
||||||
|
num, proto, compat := t.sub[1], t.sub[6], t.sub[8]
|
||||||
|
name := t.sub[7] + "_" + t.sub[9]
|
||||||
|
if t.sub[11] != "" {
|
||||||
|
name = t.sub[7] + "_" + t.sub[11]
|
||||||
|
}
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
if compat == "" || compat == "13" || compat == "30" || compat == "50" {
|
||||||
|
text += fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "darwin":
|
||||||
|
if t.Match(`^#define\s+SYS_(\w+)\s+([0-9]+)`) {
|
||||||
|
name, num := t.sub[1], t.sub[2]
|
||||||
|
name = strings.ToUpper(name)
|
||||||
|
text += fmt.Sprintf(" SYS_%s = %s;\n", name, num)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(os.Stderr, "unrecognized GOOS=%s\n", goos)
|
||||||
|
os.Exit(1)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err := s.Err()
|
||||||
|
checkErr(err)
|
||||||
|
|
||||||
|
fmt.Printf(template, cmdLine(), buildTags(), text)
|
||||||
|
}
|
||||||
|
|
||||||
|
const template = `// %s
|
||||||
|
// Code generated by the command above; see README.md. DO NOT EDIT.
|
||||||
|
|
||||||
|
// +build %s
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
const(
|
||||||
|
%s)`
|
|
@ -0,0 +1,237 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
// +build aix
|
||||||
|
|
||||||
|
/*
|
||||||
|
Input to cgo -godefs. See also mkerrors.sh and mkall.sh
|
||||||
|
*/
|
||||||
|
|
||||||
|
// +godefs map struct_in_addr [4]byte /* in_addr */
|
||||||
|
// +godefs map struct_in6_addr [16]byte /* in6_addr */
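// (The type declarations below, written in terms of C types, are the input:
// running "go tool cgo -godefs" over this file emits equivalent pure-Go
// definitions, which are checked in as a generated ztypes_* file; the
// ztypes_* naming is assumed to be the surrounding project's convention and
// is not enforced by this file.)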
|
||||||
|
|
||||||
|
package unix
|
||||||
|
|
||||||
|
/*
|
||||||
|
#include <sys/types.h>
|
||||||
|
#include <sys/time.h>
|
||||||
|
#include <sys/limits.h>
|
||||||
|
#include <sys/un.h>
|
||||||
|
#include <utime.h>
|
||||||
|
#include <sys/utsname.h>
|
||||||
|
#include <sys/poll.h>
|
||||||
|
#include <sys/resource.h>
|
||||||
|
#include <sys/stat.h>
|
||||||
|
#include <sys/statfs.h>
|
||||||
|
#include <sys/termio.h>
|
||||||
|
#include <sys/ioctl.h>
|
||||||
|
|
||||||
|
#include <termios.h>
|
||||||
|
|
||||||
|
#include <net/if.h>
|
||||||
|
#include <net/if_dl.h>
|
||||||
|
#include <netinet/in.h>
|
||||||
|
#include <netinet/icmp6.h>
|
||||||
|
|
||||||
|
|
||||||
|
#include <dirent.h>
|
||||||
|
#include <fcntl.h>
|
||||||
|
|
||||||
|
enum {
|
||||||
|
sizeofPtr = sizeof(void*),
|
||||||
|
};
|
||||||
|
|
||||||
|
union sockaddr_all {
|
||||||
|
struct sockaddr s1; // this one gets used for fields
|
||||||
|
struct sockaddr_in s2; // these pad it out
|
||||||
|
struct sockaddr_in6 s3;
|
||||||
|
struct sockaddr_un s4;
|
||||||
|
struct sockaddr_dl s5;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct sockaddr_any {
|
||||||
|
struct sockaddr addr;
|
||||||
|
char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
|
||||||
|
};
|
||||||
|
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
|
||||||
|
// Machine characteristics
|
||||||
|
|
||||||
|
const (
|
||||||
|
SizeofPtr = C.sizeofPtr
|
||||||
|
SizeofShort = C.sizeof_short
|
||||||
|
SizeofInt = C.sizeof_int
|
||||||
|
SizeofLong = C.sizeof_long
|
||||||
|
SizeofLongLong = C.sizeof_longlong
|
||||||
|
PathMax = C.PATH_MAX
|
||||||
|
)
|
||||||
|
|
||||||
|
// Basic types
|
||||||
|
|
||||||
|
type (
|
||||||
|
_C_short C.short
|
||||||
|
_C_int C.int
|
||||||
|
_C_long C.long
|
||||||
|
_C_long_long C.longlong
|
||||||
|
)
|
||||||
|
|
||||||
|
type off64 C.off64_t
|
||||||
|
type off C.off_t
|
||||||
|
type Mode_t C.mode_t
|
||||||
|
|
||||||
|
// Time
|
||||||
|
|
||||||
|
type Timespec C.struct_timespec
|
||||||
|
|
||||||
|
type Timeval C.struct_timeval
|
||||||
|
|
||||||
|
type Timeval32 C.struct_timeval32
|
||||||
|
|
||||||
|
type Timex C.struct_timex
|
||||||
|
|
||||||
|
type Time_t C.time_t
|
type Tms C.struct_tms

type Utimbuf C.struct_utimbuf

type Timezone C.struct_timezone

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit64

type Pid_t C.pid_t

type _Gid_t C.gid_t

type dev_t C.dev_t

// Files

type Stat_t C.struct_stat

type StatxTimestamp C.struct_statx_timestamp

type Statx_t C.struct_statx

type Dirent C.struct_dirent

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Cmsghdr C.struct_cmsghdr

type ICMPv6Filter C.struct_icmp6_filter

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type IPv6MTUInfo C.struct_ip6_mtuinfo

type Linger C.struct_linger

type Msghdr C.struct_msghdr

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
)

type IfMsgHdr C.struct_if_msghdr

// Misc

type FdSet C.fd_set

type Utsname C.struct_utsname

type Ustat_t C.struct_ustat

type Sigset_t C.sigset_t

const (
	AT_FDCWD = C.AT_FDCWD
	AT_REMOVEDIR = C.AT_REMOVEDIR
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// Terminal handling

type Termios C.struct_termios

type Termio C.struct_termio

type Winsize C.struct_winsize

//poll

type PollFd struct {
	Fd      int32
	Events  uint16
	Revents uint16
}

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

//flock_t

type Flock_t C.struct_flock64

// Statfs

type Fsid_t C.struct_fsid_t
type Fsid64_t C.struct_fsid64_t

type Statfs_t C.struct_statfs

const RNDGETENTCNT = 0x80045200
@@ -0,0 +1,283 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define __DARWIN_UNIX03 0
#define KERNEL
#define _DARWIN_USE_64_BIT_INODE
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <unistd.h>
#include <mach/mach.h>
#include <mach/message.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/if_var.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat64

type Statfs_t C.struct_statfs64

type Flock_t C.struct_flock

type Fstore_t C.struct_fstore

type Radvisory_t C.struct_radvisory

type Fbootstraptransfer_t C.struct_fbootstraptransfer

type Log2phys_t C.struct_log2phys

type Fsid C.struct_fsid

type Dirent C.struct_dirent

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet4Pktinfo C.struct_in_pktinfo

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet4Pktinfo = C.sizeof_struct_in_pktinfo
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT = C.PT_CONTINUE
	PTRACE_KILL = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfData = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
	SizeofIfmaMsghdr2 = C.sizeof_struct_ifma_msghdr2
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfmaMsghdr2 C.struct_ifma_msghdr2

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD = C.AT_FDCWD
	AT_REMOVEDIR = C.AT_REMOVEDIR
	AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// uname

type Utsname C.struct_utsname

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
@@ -0,0 +1,263 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.struct_fsid

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT = C.PT_CONTINUE
	PTRACE_KILL = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfData = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Uname

type Utsname C.struct_utsname
@@ -0,0 +1,400 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define _WANT_FREEBSD11_STAT 1
#define _WANT_FREEBSD11_STATFS 1
#define _WANT_FREEBSD11_DIRENT 1
#define _WANT_FREEBSD11_KEVENT 1

#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/capsicum.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

// This structure is a duplicate of if_data on FreeBSD 8-STABLE.
// See /usr/include/net/if.h.
struct if_data8 {
	u_char ifi_type;
	u_char ifi_physical;
	u_char ifi_addrlen;
	u_char ifi_hdrlen;
	u_char ifi_link_state;
	u_char ifi_spare_char1;
	u_char ifi_spare_char2;
	u_char ifi_datalen;
	u_long ifi_mtu;
	u_long ifi_metric;
	u_long ifi_baudrate;
	u_long ifi_ipackets;
	u_long ifi_ierrors;
	u_long ifi_opackets;
	u_long ifi_oerrors;
	u_long ifi_collisions;
	u_long ifi_ibytes;
	u_long ifi_obytes;
	u_long ifi_imcasts;
	u_long ifi_omcasts;
	u_long ifi_iqdrops;
	u_long ifi_noproto;
	u_long ifi_hwassist;
// FIXME: these are now unions, so maybe need to change definitions?
#undef ifi_epoch
	time_t ifi_epoch;
#undef ifi_lastchange
	struct timeval ifi_lastchange;
};

// This structure is a duplicate of if_msghdr on FreeBSD 8-STABLE.
// See /usr/include/net/if.h.
struct if_msghdr8 {
	u_short ifm_msglen;
	u_char ifm_version;
	u_char ifm_type;
	int ifm_addrs;
	int ifm_flags;
	u_short ifm_index;
	struct if_data8 ifm_data;
};
*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

const (
	_statfsVersion = C.STATFS_VERSION
	_dirblksiz = C.DIRBLKSIZ
)

type Stat_t C.struct_stat

type stat_freebsd11_t C.struct_freebsd11_stat

type Statfs_t C.struct_statfs

type statfs_freebsd11_t C.struct_freebsd11_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type dirent_freebsd11 C.struct_freebsd11_dirent

type Fsid C.struct_fsid

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Advice to Fadvise

const (
	FADV_NORMAL = C.POSIX_FADV_NORMAL
	FADV_RANDOM = C.POSIX_FADV_RANDOM
	FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
	FADV_WILLNEED = C.POSIX_FADV_WILLNEED
	FADV_DONTNEED = C.POSIX_FADV_DONTNEED
	FADV_NOREUSE = C.POSIX_FADV_NOREUSE
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPMreqn C.struct_ip_mreqn

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPMreqn = C.sizeof_struct_ip_mreqn
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_ATTACH = C.PT_ATTACH
	PTRACE_CONT = C.PT_CONTINUE
	PTRACE_DETACH = C.PT_DETACH
	PTRACE_GETFPREGS = C.PT_GETFPREGS
	PTRACE_GETFSBASE = C.PT_GETFSBASE
	PTRACE_GETLWPLIST = C.PT_GETLWPLIST
	PTRACE_GETNUMLWPS = C.PT_GETNUMLWPS
	PTRACE_GETREGS = C.PT_GETREGS
	PTRACE_GETXSTATE = C.PT_GETXSTATE
	PTRACE_IO = C.PT_IO
	PTRACE_KILL = C.PT_KILL
	PTRACE_LWPEVENTS = C.PT_LWP_EVENTS
	PTRACE_LWPINFO = C.PT_LWPINFO
	PTRACE_SETFPREGS = C.PT_SETFPREGS
	PTRACE_SETREGS = C.PT_SETREGS
	PTRACE_SINGLESTEP = C.PT_STEP
	PTRACE_TRACEME = C.PT_TRACE_ME
)

const (
	PIOD_READ_D = C.PIOD_READ_D
	PIOD_WRITE_D = C.PIOD_WRITE_D
	PIOD_READ_I = C.PIOD_READ_I
	PIOD_WRITE_I = C.PIOD_WRITE_I
)

const (
	PL_FLAG_BORN = C.PL_FLAG_BORN
	PL_FLAG_EXITED = C.PL_FLAG_EXITED
	PL_FLAG_SI = C.PL_FLAG_SI
)

const (
	TRAP_BRKPT = C.TRAP_BRKPT
	TRAP_TRACE = C.TRAP_TRACE
)

type PtraceLwpInfoStruct C.struct_ptrace_lwpinfo

type __Siginfo C.struct___siginfo

type Sigset_t C.sigset_t

type Reg C.struct_reg

type FpReg C.struct_fpreg

type PtraceIoDesc C.struct_ptrace_io_desc

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent_freebsd11

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	sizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr8
	sizeofIfData = C.sizeof_struct_if_data
	SizeofIfData = C.sizeof_struct_if_data8
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type ifMsghdr C.struct_if_msghdr

type IfMsghdr C.struct_if_msghdr8

type ifData C.struct_if_data

type IfData C.struct_if_data8

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfZbuf = C.sizeof_struct_bpf_zbuf
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
	SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfZbuf C.struct_bpf_zbuf

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfZbufHeader C.struct_bpf_zbuf_header

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD = C.AT_FDCWD
	AT_REMOVEDIR = C.AT_REMOVEDIR
	AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLINIGNEOF = C.POLLINIGNEOF
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Capabilities

type CapRights C.struct_cap_rights

// Uname

type Utsname C.struct_utsname
@@ -0,0 +1,290 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.fsid_t

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Advice to Fadvise

const (
	FADV_NORMAL = C.POSIX_FADV_NORMAL
	FADV_RANDOM = C.POSIX_FADV_RANDOM
	FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
	FADV_WILLNEED = C.POSIX_FADV_WILLNEED
	FADV_DONTNEED = C.POSIX_FADV_DONTNEED
	FADV_NOREUSE = C.POSIX_FADV_NOREUSE
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT = C.PT_CONTINUE
	PTRACE_KILL = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfData = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

type Mclpool C.struct_mclpool

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfTimeval C.struct_bpf_timeval

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

type Ptmget C.struct_ptmget

// fchmodat-like syscalls.

const (
	AT_FDCWD = C.AT_FDCWD
	AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Sysctl

type Sysctlnode C.struct_sysctlnode

// Uname

type Utsname C.struct_utsname

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
@@ -0,0 +1,283 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <uvm/uvmexp.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.fsid_t

// File system limits

const (
	PathMax = C.PATH_MAX
)

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT = C.PT_CONTINUE
	PTRACE_KILL = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfData = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfAnnounceMsghdr C.struct_if_announcemsghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

type Mclpool C.struct_mclpool

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfTimeval C.struct_bpf_timeval

// Terminal handling

type Termios C.struct_termios

type Winsize C.struct_winsize

// fchmodat-like syscalls.

const (
	AT_FDCWD = C.AT_FDCWD
	AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

// Signal Sets

type Sigset_t C.sigset_t

// Uname

type Utsname C.struct_utsname

// Uvmexp

const SizeofUvmexp = C.sizeof_struct_uvmexp

type Uvmexp C.struct_uvmexp

// Clockinfo

const SizeofClockinfo = C.sizeof_struct_clockinfo

type Clockinfo C.struct_clockinfo
@@ -0,0 +1,266 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See README.md
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#define KERNEL
// These defines ensure that builds done on newer versions of Solaris are
// backwards-compatible with older versions of Solaris and
// OpenSolaris-based derivatives.
#define __USE_SUNOS_SOCKETS__ // msghdr
#define __USE_LEGACY_PROTOTYPES__ // iovec
#include <dirent.h>
#include <fcntl.h>
#include <netdb.h>
#include <limits.h>
#include <poll.h>
#include <signal.h>
#include <termios.h>
#include <termio.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/statvfs.h>
#include <sys/time.h>
#include <sys/times.h>
#include <sys/types.h>
#include <sys/utsname.h>
#include <sys/un.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>
#include <netinet/tcp.h>
#include <ustat.h>
#include <utime.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1; // this one gets used for fields
	struct sockaddr_in s2; // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};

*/
import "C"

// Machine characteristics

const (
	SizeofPtr = C.sizeofPtr
	SizeofShort = C.sizeof_short
	SizeofInt = C.sizeof_int
	SizeofLong = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
	PathMax = C.PATH_MAX
	MaxHostNameLen = C.MAXHOSTNAMELEN
)

// Basic types

type (
	_C_short C.short
	_C_int C.int
	_C_long C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

type Tms C.struct_tms

type Utimbuf C.struct_utimbuf

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

type Stat_t C.struct_stat

type Flock_t C.struct_flock

type Dirent C.struct_dirent

// Filesystems

type _Fsblkcnt_t C.fsblkcnt_t

type Statvfs_t C.struct_statvfs

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

type IPv6MTUInfo C.struct_ip6_mtuinfo

type ICMPv6Filter C.struct_icmp6_filter

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger = C.sizeof_struct_linger
	SizeofIPMreq = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr = C.sizeof_struct_msghdr
	SizeofCmsghdr = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
	SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
	SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
)

// Select

type FdSet C.fd_set

// Misc

type Utsname C.struct_utsname

type Ustat_t C.struct_ustat

const (
	AT_FDCWD = C.AT_FDCWD
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
	AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
	AT_REMOVEDIR = C.AT_REMOVEDIR
	AT_EACCESS = C.AT_EACCESS
)

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
	SizeofIfData = C.sizeof_struct_if_data
	SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
	SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion = C.sizeof_struct_bpf_version
	SizeofBpfStat = C.sizeof_struct_bpf_stat
	SizeofBpfProgram = C.sizeof_struct_bpf_program
	SizeofBpfInsn = C.sizeof_struct_bpf_insn
	SizeofBpfHdr = C.sizeof_struct_bpf_hdr
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfTimeval C.struct_bpf_timeval

type BpfHdr C.struct_bpf_hdr

// Terminal handling

type Termios C.struct_termios

type Termio C.struct_termio

type Winsize C.struct_winsize

// poll

type PollFd C.struct_pollfd

const (
	POLLERR = C.POLLERR
	POLLHUP = C.POLLHUP
	POLLIN = C.POLLIN
	POLLNVAL = C.POLLNVAL
	POLLOUT = C.POLLOUT
	POLLPRI = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)
@@ -0,0 +1,556 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

import (
	"bufio"
	"fmt"
	"log"
	"net/http"
	"sort"
	"strings"
	"unicode/utf8"

	"golang.org/x/text/encoding"
	"golang.org/x/text/internal/gen"
)

const ascii = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" +
	"\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
	` !"#$%&'()*+,-./0123456789:;<=>?` +
	`@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_` +
	"`abcdefghijklmnopqrstuvwxyz{|}~\u007f"

var encodings = []struct {
	name string
	mib string
	comment string
	varName string
	replacement byte
	mapping string
}{
	{
		"IBM Code Page 037",
		"IBM037",
		"",
		"CodePage037",
		0x3f,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM037-2.1.2.ucm",
	},
	{
		"IBM Code Page 437",
		"PC8CodePage437",
		"",
		"CodePage437",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM437-2.1.2.ucm",
	},
	{
		"IBM Code Page 850",
		"PC850Multilingual",
		"",
		"CodePage850",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM850-2.1.2.ucm",
	},
	{
		"IBM Code Page 852",
		"PCp852",
		"",
		"CodePage852",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM852-2.1.2.ucm",
	},
	{
		"IBM Code Page 855",
		"IBM855",
		"",
		"CodePage855",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM855-2.1.2.ucm",
	},
	{
		"Windows Code Page 858", // PC latin1 with Euro
		"IBM00858",
		"",
		"CodePage858",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/windows-858-2000.ucm",
	},
	{
		"IBM Code Page 860",
		"IBM860",
		"",
		"CodePage860",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM860-2.1.2.ucm",
	},
	{
		"IBM Code Page 862",
		"PC862LatinHebrew",
		"",
		"CodePage862",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM862-2.1.2.ucm",
	},
	{
		"IBM Code Page 863",
		"IBM863",
		"",
		"CodePage863",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM863-2.1.2.ucm",
	},
	{
		"IBM Code Page 865",
		"IBM865",
		"",
		"CodePage865",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM865-2.1.2.ucm",
	},
	{
		"IBM Code Page 866",
		"IBM866",
		"",
		"CodePage866",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-ibm866.txt",
	},
	{
		"IBM Code Page 1047",
		"IBM1047",
		"",
		"CodePage1047",
		0x3f,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM1047-2.1.2.ucm",
	},
	{
		"IBM Code Page 1140",
		"IBM01140",
		"",
		"CodePage1140",
		0x3f,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/ibm-1140_P100-1997.ucm",
	},
	{
		"ISO 8859-1",
		"ISOLatin1",
		"",
		"ISO8859_1",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/iso-8859_1-1998.ucm",
	},
	{
		"ISO 8859-2",
		"ISOLatin2",
		"",
		"ISO8859_2",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-2.txt",
	},
	{
		"ISO 8859-3",
		"ISOLatin3",
		"",
		"ISO8859_3",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-3.txt",
	},
	{
		"ISO 8859-4",
		"ISOLatin4",
		"",
		"ISO8859_4",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-4.txt",
	},
	{
		"ISO 8859-5",
		"ISOLatinCyrillic",
		"",
		"ISO8859_5",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-5.txt",
	},
	{
		"ISO 8859-6",
		"ISOLatinArabic",
		"",
		"ISO8859_6,ISO8859_6E,ISO8859_6I",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-6.txt",
	},
	{
		"ISO 8859-7",
		"ISOLatinGreek",
		"",
		"ISO8859_7",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-7.txt",
	},
	{
		"ISO 8859-8",
		"ISOLatinHebrew",
		"",
		"ISO8859_8,ISO8859_8E,ISO8859_8I",
		encoding.ASCIISub,
		"http://encoding.spec.whatwg.org/index-iso-8859-8.txt",
	},
	{
		"ISO 8859-9",
		"ISOLatin5",
		"",
		"ISO8859_9",
		encoding.ASCIISub,
		"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/iso-8859_9-1999.ucm",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ISO 8859-10",
|
||||||
|
"ISOLatin6",
|
||||||
|
"",
|
||||||
|
"ISO8859_10",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-iso-8859-10.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ISO 8859-13",
|
||||||
|
"ISO885913",
|
||||||
|
"",
|
||||||
|
"ISO8859_13",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-iso-8859-13.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ISO 8859-14",
|
||||||
|
"ISO885914",
|
||||||
|
"",
|
||||||
|
"ISO8859_14",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-iso-8859-14.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ISO 8859-15",
|
||||||
|
"ISO885915",
|
||||||
|
"",
|
||||||
|
"ISO8859_15",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-iso-8859-15.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ISO 8859-16",
|
||||||
|
"ISO885916",
|
||||||
|
"",
|
||||||
|
"ISO8859_16",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-iso-8859-16.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"KOI8-R",
|
||||||
|
"KOI8R",
|
||||||
|
"",
|
||||||
|
"KOI8R",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-koi8-r.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"KOI8-U",
|
||||||
|
"KOI8U",
|
||||||
|
"",
|
||||||
|
"KOI8U",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-koi8-u.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Macintosh",
|
||||||
|
"Macintosh",
|
||||||
|
"",
|
||||||
|
"Macintosh",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-macintosh.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Macintosh Cyrillic",
|
||||||
|
"MacintoshCyrillic",
|
||||||
|
"",
|
||||||
|
"MacintoshCyrillic",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-x-mac-cyrillic.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 874",
|
||||||
|
"Windows874",
|
||||||
|
"",
|
||||||
|
"Windows874",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-874.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1250",
|
||||||
|
"Windows1250",
|
||||||
|
"",
|
||||||
|
"Windows1250",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1250.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1251",
|
||||||
|
"Windows1251",
|
||||||
|
"",
|
||||||
|
"Windows1251",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1251.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1252",
|
||||||
|
"Windows1252",
|
||||||
|
"",
|
||||||
|
"Windows1252",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1252.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1253",
|
||||||
|
"Windows1253",
|
||||||
|
"",
|
||||||
|
"Windows1253",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1253.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1254",
|
||||||
|
"Windows1254",
|
||||||
|
"",
|
||||||
|
"Windows1254",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1254.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1255",
|
||||||
|
"Windows1255",
|
||||||
|
"",
|
||||||
|
"Windows1255",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1255.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1256",
|
||||||
|
"Windows1256",
|
||||||
|
"",
|
||||||
|
"Windows1256",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1256.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1257",
|
||||||
|
"Windows1257",
|
||||||
|
"",
|
||||||
|
"Windows1257",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1257.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Windows 1258",
|
||||||
|
"Windows1258",
|
||||||
|
"",
|
||||||
|
"Windows1258",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
"http://encoding.spec.whatwg.org/index-windows-1258.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"X-User-Defined",
|
||||||
|
"XUserDefined",
|
||||||
|
"It is defined at http://encoding.spec.whatwg.org/#x-user-defined",
|
||||||
|
"XUserDefined",
|
||||||
|
encoding.ASCIISub,
|
||||||
|
ascii +
|
||||||
|
"\uf780\uf781\uf782\uf783\uf784\uf785\uf786\uf787" +
|
||||||
|
"\uf788\uf789\uf78a\uf78b\uf78c\uf78d\uf78e\uf78f" +
|
||||||
|
"\uf790\uf791\uf792\uf793\uf794\uf795\uf796\uf797" +
|
||||||
|
"\uf798\uf799\uf79a\uf79b\uf79c\uf79d\uf79e\uf79f" +
|
||||||
|
"\uf7a0\uf7a1\uf7a2\uf7a3\uf7a4\uf7a5\uf7a6\uf7a7" +
|
||||||
|
"\uf7a8\uf7a9\uf7aa\uf7ab\uf7ac\uf7ad\uf7ae\uf7af" +
|
||||||
|
"\uf7b0\uf7b1\uf7b2\uf7b3\uf7b4\uf7b5\uf7b6\uf7b7" +
|
||||||
|
"\uf7b8\uf7b9\uf7ba\uf7bb\uf7bc\uf7bd\uf7be\uf7bf" +
|
||||||
|
"\uf7c0\uf7c1\uf7c2\uf7c3\uf7c4\uf7c5\uf7c6\uf7c7" +
|
||||||
|
"\uf7c8\uf7c9\uf7ca\uf7cb\uf7cc\uf7cd\uf7ce\uf7cf" +
|
||||||
|
"\uf7d0\uf7d1\uf7d2\uf7d3\uf7d4\uf7d5\uf7d6\uf7d7" +
|
||||||
|
"\uf7d8\uf7d9\uf7da\uf7db\uf7dc\uf7dd\uf7de\uf7df" +
|
||||||
|
"\uf7e0\uf7e1\uf7e2\uf7e3\uf7e4\uf7e5\uf7e6\uf7e7" +
|
||||||
|
"\uf7e8\uf7e9\uf7ea\uf7eb\uf7ec\uf7ed\uf7ee\uf7ef" +
|
||||||
|
"\uf7f0\uf7f1\uf7f2\uf7f3\uf7f4\uf7f5\uf7f6\uf7f7" +
|
||||||
|
"\uf7f8\uf7f9\uf7fa\uf7fb\uf7fc\uf7fd\uf7fe\uf7ff",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func getWHATWG(url string) string {
|
||||||
|
res, err := http.Get(url)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("%q: Get: %v", url, err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := make([]rune, 128)
|
||||||
|
for i := range mapping {
|
||||||
|
mapping[i] = '\ufffd'
|
||||||
|
}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := 0, 0
|
||||||
|
if _, err := fmt.Sscanf(s, "%d\t0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("could not parse %q", s)
|
||||||
|
}
|
||||||
|
if x < 0 || 128 <= x {
|
||||||
|
log.Fatalf("code %d is out of range", x)
|
||||||
|
}
|
||||||
|
if 0x80 <= y && y < 0xa0 {
|
||||||
|
// We diverge from the WHATWG spec by mapping control characters
|
||||||
|
// in the range [0x80, 0xa0) to U+FFFD.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
mapping[x] = rune(y)
|
||||||
|
}
|
||||||
|
return ascii + string(mapping)
|
||||||
|
}
|
||||||
|
|
||||||
|
func getUCM(url string) string {
|
||||||
|
res, err := http.Get(url)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("%q: Get: %v", url, err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := make([]rune, 256)
|
||||||
|
for i := range mapping {
|
||||||
|
mapping[i] = '\ufffd'
|
||||||
|
}
|
||||||
|
|
||||||
|
charsFound := 0
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var c byte
|
||||||
|
var r rune
|
||||||
|
if _, err := fmt.Sscanf(s, `<U%x> \x%x |0`, &r, &c); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
mapping[c] = r
|
||||||
|
charsFound++
|
||||||
|
}
|
||||||
|
|
||||||
|
if charsFound < 200 {
|
||||||
|
log.Fatalf("%q: only %d characters found (wrong page format?)", url, charsFound)
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(mapping)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
mibs := map[string]bool{}
|
||||||
|
all := []string{}
|
||||||
|
|
||||||
|
w := gen.NewCodeWriter()
|
||||||
|
defer w.WriteGoFile("tables.go", "charmap")
|
||||||
|
|
||||||
|
printf := func(s string, a ...interface{}) { fmt.Fprintf(w, s, a...) }
|
||||||
|
|
||||||
|
printf("import (\n")
|
||||||
|
printf("\t\"golang.org/x/text/encoding\"\n")
|
||||||
|
printf("\t\"golang.org/x/text/encoding/internal/identifier\"\n")
|
||||||
|
printf(")\n\n")
|
||||||
|
for _, e := range encodings {
|
||||||
|
varNames := strings.Split(e.varName, ",")
|
||||||
|
all = append(all, varNames...)
|
||||||
|
varName := varNames[0]
|
||||||
|
switch {
|
||||||
|
case strings.HasPrefix(e.mapping, "http://encoding.spec.whatwg.org/"):
|
||||||
|
e.mapping = getWHATWG(e.mapping)
|
||||||
|
case strings.HasPrefix(e.mapping, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/"):
|
||||||
|
e.mapping = getUCM(e.mapping)
|
||||||
|
}
|
||||||
|
|
||||||
|
asciiSuperset, low := strings.HasPrefix(e.mapping, ascii), 0x00
|
||||||
|
if asciiSuperset {
|
||||||
|
low = 0x80
|
||||||
|
}
|
||||||
|
lvn := 1
|
||||||
|
if strings.HasPrefix(varName, "ISO") || strings.HasPrefix(varName, "KOI") {
|
||||||
|
lvn = 3
|
||||||
|
}
|
||||||
|
lowerVarName := strings.ToLower(varName[:lvn]) + varName[lvn:]
|
||||||
|
printf("// %s is the %s encoding.\n", varName, e.name)
|
||||||
|
if e.comment != "" {
|
||||||
|
printf("//\n// %s\n", e.comment)
|
||||||
|
}
|
||||||
|
printf("var %s *Charmap = &%s\n\nvar %s = Charmap{\nname: %q,\n",
|
||||||
|
varName, lowerVarName, lowerVarName, e.name)
|
||||||
|
if mibs[e.mib] {
|
||||||
|
log.Fatalf("MIB type %q declared multiple times.", e.mib)
|
||||||
|
}
|
||||||
|
printf("mib: identifier.%s,\n", e.mib)
|
||||||
|
printf("asciiSuperset: %t,\n", asciiSuperset)
|
||||||
|
printf("low: 0x%02x,\n", low)
|
||||||
|
printf("replacement: 0x%02x,\n", e.replacement)
|
||||||
|
|
||||||
|
printf("decode: [256]utf8Enc{\n")
|
||||||
|
i, backMapping := 0, map[rune]byte{}
|
||||||
|
for _, c := range e.mapping {
|
||||||
|
if _, ok := backMapping[c]; !ok && c != utf8.RuneError {
|
||||||
|
backMapping[c] = byte(i)
|
||||||
|
}
|
||||||
|
var buf [8]byte
|
||||||
|
n := utf8.EncodeRune(buf[:], c)
|
||||||
|
if n > 3 {
|
||||||
|
panic(fmt.Sprintf("rune %q (%U) is too long", c, c))
|
||||||
|
}
|
||||||
|
printf("{%d,[3]byte{0x%02x,0x%02x,0x%02x}},", n, buf[0], buf[1], buf[2])
|
||||||
|
if i%2 == 1 {
|
||||||
|
printf("\n")
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
printf("},\n")
|
||||||
|
|
||||||
|
printf("encode: [256]uint32{\n")
|
||||||
|
encode := make([]uint32, 0, 256)
|
||||||
|
for c, i := range backMapping {
|
||||||
|
encode = append(encode, uint32(i)<<24|uint32(c))
|
||||||
|
}
|
||||||
|
sort.Sort(byRune(encode))
|
||||||
|
for len(encode) < cap(encode) {
|
||||||
|
encode = append(encode, encode[len(encode)-1])
|
||||||
|
}
|
||||||
|
for i, enc := range encode {
|
||||||
|
printf("0x%08x,", enc)
|
||||||
|
if i%8 == 7 {
|
||||||
|
printf("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
printf("},\n}\n")
|
||||||
|
|
||||||
|
// Add an estimate of the size of a single Charmap{} struct value, which
|
||||||
|
// includes two 256 elem arrays of 4 bytes and some extra fields, which
|
||||||
|
// align to 3 uint64s on 64-bit architectures.
|
||||||
|
w.Size += 2*4*256 + 3*8
|
||||||
|
}
|
||||||
|
// TODO: add proper line breaking.
|
||||||
|
printf("var listAll = []encoding.Encoding{\n%s,\n}\n\n", strings.Join(all, ",\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
type byRune []uint32
|
||||||
|
|
||||||
|
func (b byRune) Len() int { return len(b) }
|
||||||
|
func (b byRune) Less(i, j int) bool { return b[i]&0xffffff < b[j]&0xffffff }
|
||||||
|
func (b byRune) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
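The generator above packs each encode entry as byte<<24 | rune and keeps the slice sorted by the rune stored in the low 24 bits (see byRune). The standalone sketch below, which is not part of the vendored package (the lookup helper and the two table entries are illustrative assumptions), shows how such a packed, rune-sorted table can be searched:

// sketch_charmap_lookup.go: illustrative only, not part of the vendored code.
package main

import (
	"fmt"
	"sort"
)

// lookup binary-searches a table of byte<<24|rune entries, sorted by the
// rune in the low 24 bits, and returns the charmap byte for r if present.
func lookup(encode []uint32, r rune) (byte, bool) {
	i := sort.Search(len(encode), func(i int) bool {
		return rune(encode[i]&0xffffff) >= r
	})
	if i < len(encode) && rune(encode[i]&0xffffff) == r {
		return byte(encode[i] >> 24), true
	}
	return 0, false
}

func main() {
	table := []uint32{
		0x41<<24 | uint32('A'), // hypothetical pair: byte 0x41 <-> 'A'
		0xC4<<24 | uint32('Ä'), // hypothetical pair: byte 0xC4 <-> 'Ä'
	}
	b, ok := lookup(table, 'Ä')
	fmt.Println(b, ok) // 196 true
}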
|
|
@@ -0,0 +1,173 @@
|
||||||
|
// Copyright 2015 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/text/internal/gen"
|
||||||
|
)
|
||||||
|
|
||||||
|
type group struct {
|
||||||
|
Encodings []struct {
|
||||||
|
Labels []string
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
gen.Init()
|
||||||
|
|
||||||
|
r := gen.Open("https://encoding.spec.whatwg.org", "whatwg", "encodings.json")
|
||||||
|
var groups []group
|
||||||
|
if err := json.NewDecoder(r).Decode(&groups); err != nil {
|
||||||
|
log.Fatalf("Error reading encodings.json: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
w := &bytes.Buffer{}
|
||||||
|
fmt.Fprintln(w, "type htmlEncoding byte")
|
||||||
|
fmt.Fprintln(w, "const (")
|
||||||
|
for i, g := range groups {
|
||||||
|
for _, e := range g.Encodings {
|
||||||
|
key := strings.ToLower(e.Name)
|
||||||
|
name := consts[key]
|
||||||
|
if name == "" {
|
||||||
|
log.Fatalf("No const defined for %s.", key)
|
||||||
|
}
|
||||||
|
if i == 0 {
|
||||||
|
fmt.Fprintf(w, "%s htmlEncoding = iota\n", name)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(w, "%s\n", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "numEncodings")
|
||||||
|
fmt.Fprint(w, ")\n\n")
|
||||||
|
|
||||||
|
fmt.Fprintln(w, "var canonical = [numEncodings]string{")
|
||||||
|
for _, g := range groups {
|
||||||
|
for _, e := range g.Encodings {
|
||||||
|
fmt.Fprintf(w, "%q,\n", strings.ToLower(e.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, "}\n\n")
|
||||||
|
|
||||||
|
fmt.Fprintln(w, "var nameMap = map[string]htmlEncoding{")
|
||||||
|
for _, g := range groups {
|
||||||
|
for _, e := range g.Encodings {
|
||||||
|
for _, l := range e.Labels {
|
||||||
|
key := strings.ToLower(e.Name)
|
||||||
|
name := consts[key]
|
||||||
|
fmt.Fprintf(w, "%q: %s,\n", l, name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, "}\n\n")
|
||||||
|
|
||||||
|
var tags []string
|
||||||
|
fmt.Fprintln(w, "var localeMap = []htmlEncoding{")
|
||||||
|
for _, loc := range locales {
|
||||||
|
tags = append(tags, loc.tag)
|
||||||
|
fmt.Fprintf(w, "%s, // %s \n", consts[loc.name], loc.tag)
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, "}\n\n")
|
||||||
|
|
||||||
|
fmt.Fprintf(w, "const locales = %q\n", strings.Join(tags, " "))
|
||||||
|
|
||||||
|
gen.WriteGoFile("tables.go", "htmlindex", w.Bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
// consts maps canonical encoding name to internal constant.
|
||||||
|
var consts = map[string]string{
|
||||||
|
"utf-8": "utf8",
|
||||||
|
"ibm866": "ibm866",
|
||||||
|
"iso-8859-2": "iso8859_2",
|
||||||
|
"iso-8859-3": "iso8859_3",
|
||||||
|
"iso-8859-4": "iso8859_4",
|
||||||
|
"iso-8859-5": "iso8859_5",
|
||||||
|
"iso-8859-6": "iso8859_6",
|
||||||
|
"iso-8859-7": "iso8859_7",
|
||||||
|
"iso-8859-8": "iso8859_8",
|
||||||
|
"iso-8859-8-i": "iso8859_8I",
|
||||||
|
"iso-8859-10": "iso8859_10",
|
||||||
|
"iso-8859-13": "iso8859_13",
|
||||||
|
"iso-8859-14": "iso8859_14",
|
||||||
|
"iso-8859-15": "iso8859_15",
|
||||||
|
"iso-8859-16": "iso8859_16",
|
||||||
|
"koi8-r": "koi8r",
|
||||||
|
"koi8-u": "koi8u",
|
||||||
|
"macintosh": "macintosh",
|
||||||
|
"windows-874": "windows874",
|
||||||
|
"windows-1250": "windows1250",
|
||||||
|
"windows-1251": "windows1251",
|
||||||
|
"windows-1252": "windows1252",
|
||||||
|
"windows-1253": "windows1253",
|
||||||
|
"windows-1254": "windows1254",
|
||||||
|
"windows-1255": "windows1255",
|
||||||
|
"windows-1256": "windows1256",
|
||||||
|
"windows-1257": "windows1257",
|
||||||
|
"windows-1258": "windows1258",
|
||||||
|
"x-mac-cyrillic": "macintoshCyrillic",
|
||||||
|
"gbk": "gbk",
|
||||||
|
"gb18030": "gb18030",
|
||||||
|
// "hz-gb-2312": "hzgb2312", // Was removed from WhatWG
|
||||||
|
"big5": "big5",
|
||||||
|
"euc-jp": "eucjp",
|
||||||
|
"iso-2022-jp": "iso2022jp",
|
||||||
|
"shift_jis": "shiftJIS",
|
||||||
|
"euc-kr": "euckr",
|
||||||
|
"replacement": "replacement",
|
||||||
|
"utf-16be": "utf16be",
|
||||||
|
"utf-16le": "utf16le",
|
||||||
|
"x-user-defined": "xUserDefined",
|
||||||
|
}
|
||||||
|
|
||||||
|
// locales is taken from
|
||||||
|
// https://html.spec.whatwg.org/multipage/syntax.html#encoding-sniffing-algorithm.
|
||||||
|
var locales = []struct{ tag, name string }{
|
||||||
|
// The default value. Explicitly state latin to benefit from the exact
|
||||||
|
// script option, while still making 1252 the default encoding for languages
|
||||||
|
// written in Latin script.
|
||||||
|
{"und_Latn", "windows-1252"},
|
||||||
|
{"ar", "windows-1256"},
|
||||||
|
{"ba", "windows-1251"},
|
||||||
|
{"be", "windows-1251"},
|
||||||
|
{"bg", "windows-1251"},
|
||||||
|
{"cs", "windows-1250"},
|
||||||
|
{"el", "iso-8859-7"},
|
||||||
|
{"et", "windows-1257"},
|
||||||
|
{"fa", "windows-1256"},
|
||||||
|
{"he", "windows-1255"},
|
||||||
|
{"hr", "windows-1250"},
|
||||||
|
{"hu", "iso-8859-2"},
|
||||||
|
{"ja", "shift_jis"},
|
||||||
|
{"kk", "windows-1251"},
|
||||||
|
{"ko", "euc-kr"},
|
||||||
|
{"ku", "windows-1254"},
|
||||||
|
{"ky", "windows-1251"},
|
||||||
|
{"lt", "windows-1257"},
|
||||||
|
{"lv", "windows-1257"},
|
||||||
|
{"mk", "windows-1251"},
|
||||||
|
{"pl", "iso-8859-2"},
|
||||||
|
{"ru", "windows-1251"},
|
||||||
|
{"sah", "windows-1251"},
|
||||||
|
{"sk", "windows-1250"},
|
||||||
|
{"sl", "iso-8859-2"},
|
||||||
|
{"sr", "windows-1251"},
|
||||||
|
{"tg", "windows-1251"},
|
||||||
|
{"th", "windows-874"},
|
||||||
|
{"tr", "windows-1254"},
|
||||||
|
{"tt", "windows-1251"},
|
||||||
|
{"uk", "windows-1251"},
|
||||||
|
{"vi", "windows-1258"},
|
||||||
|
{"zh-hans", "gb18030"},
|
||||||
|
{"zh-hant", "big5"},
|
||||||
|
}
|
|
@@ -0,0 +1,142 @@
|
||||||
|
// Copyright 2015 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/xml"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/text/internal/gen"
|
||||||
|
)
|
||||||
|
|
||||||
|
type registry struct {
|
||||||
|
XMLName xml.Name `xml:"registry"`
|
||||||
|
Updated string `xml:"updated"`
|
||||||
|
Registry []struct {
|
||||||
|
ID string `xml:"id,attr"`
|
||||||
|
Record []struct {
|
||||||
|
Name string `xml:"name"`
|
||||||
|
Xref []struct {
|
||||||
|
Type string `xml:"type,attr"`
|
||||||
|
Data string `xml:"data,attr"`
|
||||||
|
} `xml:"xref"`
|
||||||
|
Desc struct {
|
||||||
|
Data string `xml:",innerxml"`
|
||||||
|
// Any []struct {
|
||||||
|
// Data string `xml:",chardata"`
|
||||||
|
// } `xml:",any"`
|
||||||
|
// Data string `xml:",chardata"`
|
||||||
|
} `xml:"description,"`
|
||||||
|
MIB string `xml:"value"`
|
||||||
|
Alias []string `xml:"alias"`
|
||||||
|
MIME string `xml:"preferred_alias"`
|
||||||
|
} `xml:"record"`
|
||||||
|
} `xml:"registry"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
r := gen.OpenIANAFile("assignments/character-sets/character-sets.xml")
|
||||||
|
reg := ®istry{}
|
||||||
|
if err := xml.NewDecoder(r).Decode(®); err != nil && err != io.EOF {
|
||||||
|
log.Fatalf("Error decoding charset registry: %v", err)
|
||||||
|
}
|
||||||
|
if len(reg.Registry) == 0 || reg.Registry[0].ID != "character-sets-1" {
|
||||||
|
log.Fatalf("Unexpected ID %s", reg.Registry[0].ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
w := &bytes.Buffer{}
|
||||||
|
fmt.Fprintf(w, "const (\n")
|
||||||
|
for _, rec := range reg.Registry[0].Record {
|
||||||
|
constName := ""
|
||||||
|
for _, a := range rec.Alias {
|
||||||
|
if strings.HasPrefix(a, "cs") && strings.IndexByte(a, '-') == -1 {
|
||||||
|
// Some of the constant definitions have comments in them. Strip those.
|
||||||
|
constName = strings.Title(strings.SplitN(a[2:], "\n", 2)[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if constName == "" {
|
||||||
|
switch rec.MIB {
|
||||||
|
case "2085":
|
||||||
|
constName = "HZGB2312" // Not listed as alias for some reason.
|
||||||
|
default:
|
||||||
|
log.Fatalf("No cs alias defined for %s.", rec.MIB)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if rec.MIME != "" {
|
||||||
|
rec.MIME = fmt.Sprintf(" (MIME: %s)", rec.MIME)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "// %s is the MIB identifier with IANA name %s%s.\n//\n", constName, rec.Name, rec.MIME)
|
||||||
|
if len(rec.Desc.Data) > 0 {
|
||||||
|
fmt.Fprint(w, "// ")
|
||||||
|
d := xml.NewDecoder(strings.NewReader(rec.Desc.Data))
|
||||||
|
inElem := true
|
||||||
|
attr := ""
|
||||||
|
for {
|
||||||
|
t, err := d.Token()
|
||||||
|
if err != nil {
|
||||||
|
if err != io.EOF {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
switch x := t.(type) {
|
||||||
|
case xml.CharData:
|
||||||
|
attr = "" // Don't need attribute info.
|
||||||
|
a := bytes.Split([]byte(x), []byte("\n"))
|
||||||
|
for i, b := range a {
|
||||||
|
if b = bytes.TrimSpace(b); len(b) != 0 {
|
||||||
|
if !inElem && i > 0 {
|
||||||
|
fmt.Fprint(w, "\n// ")
|
||||||
|
}
|
||||||
|
inElem = false
|
||||||
|
fmt.Fprintf(w, "%s ", string(b))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case xml.StartElement:
|
||||||
|
if x.Name.Local == "xref" {
|
||||||
|
inElem = true
|
||||||
|
use := false
|
||||||
|
for _, a := range x.Attr {
|
||||||
|
if a.Name.Local == "type" {
|
||||||
|
use = use || a.Value != "person"
|
||||||
|
}
|
||||||
|
if a.Name.Local == "data" && use {
|
||||||
|
// Patch up URLs to use https. From some links, the
|
||||||
|
// https version is different from the http one.
|
||||||
|
s := a.Value
|
||||||
|
s = strings.Replace(s, "http://", "https://", -1)
|
||||||
|
s = strings.Replace(s, "/unicode/", "/", -1)
|
||||||
|
attr = s + " "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case xml.EndElement:
|
||||||
|
inElem = false
|
||||||
|
fmt.Fprint(w, attr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, "\n")
|
||||||
|
}
|
||||||
|
for _, x := range rec.Xref {
|
||||||
|
switch x.Type {
|
||||||
|
case "rfc":
|
||||||
|
fmt.Fprintf(w, "// Reference: %s\n", strings.ToUpper(x.Data))
|
||||||
|
case "uri":
|
||||||
|
fmt.Fprintf(w, "// Reference: %s\n", x.Data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "%s MIB = %s\n", constName, rec.MIB)
|
||||||
|
fmt.Fprintln(w)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, ")")
|
||||||
|
|
||||||
|
gen.WriteGoFile("mib.go", "identifier", w.Bytes())
|
||||||
|
}
|
|
@@ -0,0 +1,161 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
// This program generates tables.go:
|
||||||
|
// go run maketables.go | gofmt > tables.go
|
||||||
|
|
||||||
|
// TODO: Emoji extensions?
|
||||||
|
// https://www.unicode.org/faq/emoji_dingbats.html
|
||||||
|
// https://www.unicode.org/Public/UNIDATA/EmojiSources.txt
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type entry struct {
|
||||||
|
jisCode, table int
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fmt.Printf("// generated by go run maketables.go; DO NOT EDIT\n\n")
|
||||||
|
fmt.Printf("// Package japanese provides Japanese encodings such as EUC-JP and Shift JIS.\n")
|
||||||
|
fmt.Printf(`package japanese // import "golang.org/x/text/encoding/japanese"` + "\n\n")
|
||||||
|
|
||||||
|
reverse := [65536]entry{}
|
||||||
|
for i := range reverse {
|
||||||
|
reverse[i].table = -1
|
||||||
|
}
|
||||||
|
|
||||||
|
tables := []struct {
|
||||||
|
url string
|
||||||
|
name string
|
||||||
|
}{
|
||||||
|
{"http://encoding.spec.whatwg.org/index-jis0208.txt", "0208"},
|
||||||
|
{"http://encoding.spec.whatwg.org/index-jis0212.txt", "0212"},
|
||||||
|
}
|
||||||
|
for i, table := range tables {
|
||||||
|
res, err := http.Get(table.url)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("%q: Get: %v", table.url, err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := [65536]uint16{}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := 0, uint16(0)
|
||||||
|
if _, err := fmt.Sscanf(s, "%d 0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("%q: could not parse %q", table.url, s)
|
||||||
|
}
|
||||||
|
if x < 0 || 120*94 <= x {
|
||||||
|
log.Fatalf("%q: JIS code %d is out of range", table.url, x)
|
||||||
|
}
|
||||||
|
mapping[x] = y
|
||||||
|
if reverse[y].table == -1 {
|
||||||
|
reverse[y] = entry{jisCode: x, table: i}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
log.Fatalf("%q: scanner error: %v", table.url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("// jis%sDecode is the decoding table from JIS %s code to Unicode.\n// It is defined at %s\n",
|
||||||
|
table.name, table.name, table.url)
|
||||||
|
fmt.Printf("var jis%sDecode = [...]uint16{\n", table.name)
|
||||||
|
for i, m := range mapping {
|
||||||
|
if m != 0 {
|
||||||
|
fmt.Printf("\t%d: 0x%04X,\n", i, m)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Any run of at least separation continuous zero entries in the reverse map will
|
||||||
|
// be a separate encode table.
|
||||||
|
const separation = 1024
|
||||||
|
|
||||||
|
intervals := []interval(nil)
|
||||||
|
low, high := -1, -1
|
||||||
|
for i, v := range reverse {
|
||||||
|
if v.table == -1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if low < 0 {
|
||||||
|
low = i
|
||||||
|
} else if i-high >= separation {
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
low = i
|
||||||
|
}
|
||||||
|
high = i + 1
|
||||||
|
}
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
sort.Sort(byDecreasingLength(intervals))
|
||||||
|
|
||||||
|
fmt.Printf("const (\n")
|
||||||
|
fmt.Printf("\tjis0208 = 1\n")
|
||||||
|
fmt.Printf("\tjis0212 = 2\n")
|
||||||
|
fmt.Printf("\tcodeMask = 0x7f\n")
|
||||||
|
fmt.Printf("\tcodeShift = 7\n")
|
||||||
|
fmt.Printf("\ttableShift = 14\n")
|
||||||
|
fmt.Printf(")\n\n")
|
||||||
|
|
||||||
|
fmt.Printf("const numEncodeTables = %d\n\n", len(intervals))
|
||||||
|
fmt.Printf("// encodeX are the encoding tables from Unicode to JIS code,\n")
|
||||||
|
fmt.Printf("// sorted by decreasing length.\n")
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("// encode%d: %5d entries for runes in [%5d, %5d).\n", i, v.len(), v.low, v.high)
|
||||||
|
}
|
||||||
|
fmt.Printf("//\n")
|
||||||
|
fmt.Printf("// The high two bits of the value record whether the JIS code comes from the\n")
|
||||||
|
fmt.Printf("// JIS0208 table (high bits == 1) or the JIS0212 table (high bits == 2).\n")
|
||||||
|
fmt.Printf("// The low 14 bits are two 7-bit unsigned integers j1 and j2 that form the\n")
|
||||||
|
fmt.Printf("// JIS code (94*j1 + j2) within that table.\n")
|
||||||
|
fmt.Printf("\n")
|
||||||
|
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("const encode%dLow, encode%dHigh = %d, %d\n\n", i, i, v.low, v.high)
|
||||||
|
fmt.Printf("var encode%d = [...]uint16{\n", i)
|
||||||
|
for j := v.low; j < v.high; j++ {
|
||||||
|
x := reverse[j]
|
||||||
|
if x.table == -1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("\t%d - %d: jis%s<<14 | 0x%02X<<7 | 0x%02X,\n",
|
||||||
|
j, v.low, tables[x.table].name, x.jisCode/94, x.jisCode%94)
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// interval is a half-open interval [low, high).
|
||||||
|
type interval struct {
|
||||||
|
low, high int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i interval) len() int { return i.high - i.low }
|
||||||
|
|
||||||
|
// byDecreasingLength sorts intervals by decreasing length.
|
||||||
|
type byDecreasingLength []interval
|
||||||
|
|
||||||
|
func (b byDecreasingLength) Len() int { return len(b) }
|
||||||
|
func (b byDecreasingLength) Less(i, j int) bool { return b[i].len() > b[j].len() }
|
||||||
|
func (b byDecreasingLength) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
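The constants and comments emitted by the generator above document the packed layout of the encodeN values: the bits above tableShift select the JIS table (1 for JIS 0208, 2 for JIS 0212) and the low 14 bits hold two 7-bit fields j1 and j2 that form the JIS code 94*j1 + j2. The standalone sketch below decodes one such value; the unpack helper and the sample value are illustrative assumptions, not part of the vendored code.

// sketch_jis_unpack.go: illustrative only, not part of the vendored code.
package main

import "fmt"

const (
	codeMask   = 0x7f
	codeShift  = 7
	tableShift = 14
)

// unpack splits a packed encode-table value into the JIS table selector
// (1 = JIS 0208, 2 = JIS 0212) and the JIS code 94*j1 + j2.
func unpack(v uint16) (table int, jisCode int) {
	table = int(v >> tableShift)
	j1 := int(v>>codeShift) & codeMask
	j2 := int(v) & codeMask
	return table, 94*j1 + j2
}

func main() {
	// Hypothetical packed value: table 1 (JIS 0208), j1 = 0x21, j2 = 0x0f.
	v := uint16(1<<tableShift | 0x21<<codeShift | 0x0f)
	t, code := unpack(v)
	fmt.Println(t, code) // 1 3117
}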
|
|
@@ -0,0 +1,143 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
// This program generates tables.go:
|
||||||
|
// go run maketables.go | gofmt > tables.go
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fmt.Printf("// generated by go run maketables.go; DO NOT EDIT\n\n")
|
||||||
|
fmt.Printf("// Package korean provides Korean encodings such as EUC-KR.\n")
|
||||||
|
fmt.Printf(`package korean // import "golang.org/x/text/encoding/korean"` + "\n\n")
|
||||||
|
|
||||||
|
res, err := http.Get("http://encoding.spec.whatwg.org/index-euc-kr.txt")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Get: %v", err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := [65536]uint16{}
|
||||||
|
reverse := [65536]uint16{}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := uint16(0), uint16(0)
|
||||||
|
if _, err := fmt.Sscanf(s, "%d 0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("could not parse %q", s)
|
||||||
|
}
|
||||||
|
if x < 0 || 178*(0xc7-0x81)+(0xfe-0xc7)*94+(0xff-0xa1) <= x {
|
||||||
|
log.Fatalf("EUC-KR code %d is out of range", x)
|
||||||
|
}
|
||||||
|
mapping[x] = y
|
||||||
|
if reverse[y] == 0 {
|
||||||
|
c0, c1 := uint16(0), uint16(0)
|
||||||
|
if x < 178*(0xc7-0x81) {
|
||||||
|
c0 = uint16(x/178) + 0x81
|
||||||
|
c1 = uint16(x % 178)
|
||||||
|
switch {
|
||||||
|
case c1 < 1*26:
|
||||||
|
c1 += 0x41
|
||||||
|
case c1 < 2*26:
|
||||||
|
c1 += 0x47
|
||||||
|
default:
|
||||||
|
c1 += 0x4d
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
x -= 178 * (0xc7 - 0x81)
|
||||||
|
c0 = uint16(x/94) + 0xc7
|
||||||
|
c1 = uint16(x%94) + 0xa1
|
||||||
|
}
|
||||||
|
reverse[y] = c0<<8 | c1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
log.Fatalf("scanner error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("// decode is the decoding table from EUC-KR code to Unicode.\n")
|
||||||
|
fmt.Printf("// It is defined at http://encoding.spec.whatwg.org/index-euc-kr.txt\n")
|
||||||
|
fmt.Printf("var decode = [...]uint16{\n")
|
||||||
|
for i, v := range mapping {
|
||||||
|
if v != 0 {
|
||||||
|
fmt.Printf("\t%d: 0x%04X,\n", i, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
|
||||||
|
// Any run of at least separation continuous zero entries in the reverse map will
|
||||||
|
// be a separate encode table.
|
||||||
|
const separation = 1024
|
||||||
|
|
||||||
|
intervals := []interval(nil)
|
||||||
|
low, high := -1, -1
|
||||||
|
for i, v := range reverse {
|
||||||
|
if v == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if low < 0 {
|
||||||
|
low = i
|
||||||
|
} else if i-high >= separation {
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
low = i
|
||||||
|
}
|
||||||
|
high = i + 1
|
||||||
|
}
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
sort.Sort(byDecreasingLength(intervals))
|
||||||
|
|
||||||
|
fmt.Printf("const numEncodeTables = %d\n\n", len(intervals))
|
||||||
|
fmt.Printf("// encodeX are the encoding tables from Unicode to EUC-KR code,\n")
|
||||||
|
fmt.Printf("// sorted by decreasing length.\n")
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("// encode%d: %5d entries for runes in [%5d, %5d).\n", i, v.len(), v.low, v.high)
|
||||||
|
}
|
||||||
|
fmt.Printf("\n")
|
||||||
|
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("const encode%dLow, encode%dHigh = %d, %d\n\n", i, i, v.low, v.high)
|
||||||
|
fmt.Printf("var encode%d = [...]uint16{\n", i)
|
||||||
|
for j := v.low; j < v.high; j++ {
|
||||||
|
x := reverse[j]
|
||||||
|
if x == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("\t%d-%d: 0x%04X,\n", j, v.low, x)
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// interval is a half-open interval [low, high).
|
||||||
|
type interval struct {
|
||||||
|
low, high int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i interval) len() int { return i.high - i.low }
|
||||||
|
|
||||||
|
// byDecreasingLength sorts intervals by decreasing length.
|
||||||
|
type byDecreasingLength []interval
|
||||||
|
|
||||||
|
func (b byDecreasingLength) Len() int { return len(b) }
|
||||||
|
func (b byDecreasingLength) Less(i, j int) bool { return b[i].len() > b[j].len() }
|
||||||
|
func (b byDecreasingLength) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
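The reverse-mapping branch in the EUC-KR generator above converts an index into a two-byte sequence: indexes below 178*(0xc7-0x81) use a lead byte starting at 0x81 with a 178-wide trail column split across three alphabetic-style ranges, while the remaining indexes use lead bytes from 0xc7 with a 94-wide trail starting at 0xa1. A standalone sketch of that computation follows; the eucKRBytes helper and the sample index are illustrative assumptions only.

// sketch_euckr_bytes.go: illustrative only, not part of the vendored code.
package main

import "fmt"

// eucKRBytes maps an index from the WHATWG euc-kr table to its byte pair,
// mirroring the arithmetic in the generator above.
func eucKRBytes(x int) (c0, c1 byte) {
	if x < 178*(0xc7-0x81) {
		c0 = byte(x/178) + 0x81
		t := x % 178
		switch {
		case t < 1*26:
			t += 0x41
		case t < 2*26:
			t += 0x47
		default:
			t += 0x4d
		}
		c1 = byte(t)
	} else {
		x -= 178 * (0xc7 - 0x81)
		c0 = byte(x/94) + 0xc7
		c1 = byte(x%94) + 0xa1
	}
	return c0, c1
}

func main() {
	// Hypothetical index value, for illustration only.
	c0, c1 := eucKRBytes(12345)
	fmt.Printf("0x%02x 0x%02x\n", c0, c1) // 0xc6 0x8c
}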
|
161
vendor/golang.org/x/text/encoding/simplifiedchinese/maketables.go
generated
vendored
Normal file
|
@@ -0,0 +1,161 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
// This program generates tables.go:
|
||||||
|
// go run maketables.go | gofmt > tables.go
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fmt.Printf("// generated by go run maketables.go; DO NOT EDIT\n\n")
|
||||||
|
fmt.Printf("// Package simplifiedchinese provides Simplified Chinese encodings such as GBK.\n")
|
||||||
|
fmt.Printf(`package simplifiedchinese // import "golang.org/x/text/encoding/simplifiedchinese"` + "\n\n")
|
||||||
|
|
||||||
|
printGB18030()
|
||||||
|
printGBK()
|
||||||
|
}
|
||||||
|
|
||||||
|
func printGB18030() {
|
||||||
|
res, err := http.Get("http://encoding.spec.whatwg.org/index-gb18030.txt")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Get: %v", err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
fmt.Printf("// gb18030 is the table from http://encoding.spec.whatwg.org/index-gb18030.txt\n")
|
||||||
|
fmt.Printf("var gb18030 = [...][2]uint16{\n")
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := uint32(0), uint32(0)
|
||||||
|
if _, err := fmt.Sscanf(s, "%d 0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("could not parse %q", s)
|
||||||
|
}
|
||||||
|
if x < 0x10000 && y < 0x10000 {
|
||||||
|
fmt.Printf("\t{0x%04x, 0x%04x},\n", x, y)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func printGBK() {
|
||||||
|
res, err := http.Get("http://encoding.spec.whatwg.org/index-gbk.txt")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Get: %v", err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := [65536]uint16{}
|
||||||
|
reverse := [65536]uint16{}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := uint16(0), uint16(0)
|
||||||
|
if _, err := fmt.Sscanf(s, "%d 0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("could not parse %q", s)
|
||||||
|
}
|
||||||
|
if x < 0 || 126*190 <= x {
|
||||||
|
log.Fatalf("GBK code %d is out of range", x)
|
||||||
|
}
|
||||||
|
mapping[x] = y
|
||||||
|
if reverse[y] == 0 {
|
||||||
|
c0, c1 := x/190, x%190
|
||||||
|
if c1 >= 0x3f {
|
||||||
|
c1++
|
||||||
|
}
|
||||||
|
reverse[y] = (0x81+c0)<<8 | (0x40 + c1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
log.Fatalf("scanner error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("// decode is the decoding table from GBK code to Unicode.\n")
|
||||||
|
fmt.Printf("// It is defined at http://encoding.spec.whatwg.org/index-gbk.txt\n")
|
||||||
|
fmt.Printf("var decode = [...]uint16{\n")
|
||||||
|
for i, v := range mapping {
|
||||||
|
if v != 0 {
|
||||||
|
fmt.Printf("\t%d: 0x%04X,\n", i, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
|
||||||
|
// Any run of at least separation continuous zero entries in the reverse map will
|
||||||
|
// be a separate encode table.
|
||||||
|
const separation = 1024
|
||||||
|
|
||||||
|
intervals := []interval(nil)
|
||||||
|
low, high := -1, -1
|
||||||
|
for i, v := range reverse {
|
||||||
|
if v == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if low < 0 {
|
||||||
|
low = i
|
||||||
|
} else if i-high >= separation {
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
low = i
|
||||||
|
}
|
||||||
|
high = i + 1
|
||||||
|
}
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
sort.Sort(byDecreasingLength(intervals))
|
||||||
|
|
||||||
|
fmt.Printf("const numEncodeTables = %d\n\n", len(intervals))
|
||||||
|
fmt.Printf("// encodeX are the encoding tables from Unicode to GBK code,\n")
|
||||||
|
fmt.Printf("// sorted by decreasing length.\n")
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("// encode%d: %5d entries for runes in [%5d, %5d).\n", i, v.len(), v.low, v.high)
|
||||||
|
}
|
||||||
|
fmt.Printf("\n")
|
||||||
|
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("const encode%dLow, encode%dHigh = %d, %d\n\n", i, i, v.low, v.high)
|
||||||
|
fmt.Printf("var encode%d = [...]uint16{\n", i)
|
||||||
|
for j := v.low; j < v.high; j++ {
|
||||||
|
x := reverse[j]
|
||||||
|
if x == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("\t%d-%d: 0x%04X,\n", j, v.low, x)
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// interval is a half-open interval [low, high).
|
||||||
|
type interval struct {
|
||||||
|
low, high int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i interval) len() int { return i.high - i.low }
|
||||||
|
|
||||||
|
// byDecreasingLength sorts intervals by decreasing length.
|
||||||
|
type byDecreasingLength []interval
|
||||||
|
|
||||||
|
func (b byDecreasingLength) Len() int { return len(b) }
|
||||||
|
func (b byDecreasingLength) Less(i, j int) bool { return b[i].len() > b[j].len() }
|
||||||
|
func (b byDecreasingLength) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
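printGBK above derives the reverse byte pair from the index as a row x/190 and a 190-wide column x%190, bumping columns at or above 0x3f so the invalid trail byte 0x7f is skipped, then offsetting by 0x81 and 0x40. A standalone sketch of the same computation follows; the gbkBytes helper and the sample index are illustrative assumptions only.

// sketch_gbk_bytes.go: illustrative only, not part of the vendored code.
package main

import "fmt"

// gbkBytes maps an index from the WHATWG gbk table to its byte pair,
// mirroring the arithmetic in printGBK above.
func gbkBytes(x uint16) (c0, c1 byte) {
	r, c := x/190, x%190
	if c >= 0x3f {
		c++ // skip 0x7f, which is not a valid trail byte
	}
	return byte(0x81 + r), byte(0x40 + c)
}

func main() {
	// Hypothetical index value, for illustration only.
	c0, c1 := gbkBytes(5000)
	fmt.Printf("0x%02x 0x%02x\n", c0, c1) // 0x9b 0x7c
}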
|
140
vendor/golang.org/x/text/encoding/traditionalchinese/maketables.go
generated
vendored
Normal file
|
@@ -0,0 +1,140 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
// This program generates tables.go:
|
||||||
|
// go run maketables.go | gofmt > tables.go
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fmt.Printf("// generated by go run maketables.go; DO NOT EDIT\n\n")
|
||||||
|
fmt.Printf("// Package traditionalchinese provides Traditional Chinese encodings such as Big5.\n")
|
||||||
|
fmt.Printf(`package traditionalchinese // import "golang.org/x/text/encoding/traditionalchinese"` + "\n\n")
|
||||||
|
|
||||||
|
res, err := http.Get("http://encoding.spec.whatwg.org/index-big5.txt")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Get: %v", err)
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
mapping := [65536]uint32{}
|
||||||
|
reverse := [65536 * 4]uint16{}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(res.Body)
|
||||||
|
for scanner.Scan() {
|
||||||
|
s := strings.TrimSpace(scanner.Text())
|
||||||
|
if s == "" || s[0] == '#' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
x, y := uint16(0), uint32(0)
|
||||||
|
if _, err := fmt.Sscanf(s, "%d 0x%x", &x, &y); err != nil {
|
||||||
|
log.Fatalf("could not parse %q", s)
|
||||||
|
}
|
||||||
|
if x < 0 || 126*157 <= x {
|
||||||
|
log.Fatalf("Big5 code %d is out of range", x)
|
||||||
|
}
|
||||||
|
mapping[x] = y
|
||||||
|
|
||||||
|
// The WHATWG spec http://encoding.spec.whatwg.org/#indexes says that
|
||||||
|
// "The index pointer for code point in index is the first pointer
|
||||||
|
// corresponding to code point in index", which would normally mean
|
||||||
|
// that the code below should be guarded by "if reverse[y] == 0", but
|
||||||
|
// last instead of first seems to match the behavior of
|
||||||
|
// "iconv -f UTF-8 -t BIG5". For example, U+8005 者 occurs twice in
|
||||||
|
// http://encoding.spec.whatwg.org/index-big5.txt, as index 2148
|
||||||
|
// (encoded as "\x8e\xcd") and index 6543 (encoded as "\xaa\xcc")
|
||||||
|
// and "echo 者 | iconv -f UTF-8 -t BIG5 | xxd" gives "\xaa\xcc".
|
||||||
|
c0, c1 := x/157, x%157
|
||||||
|
if c1 < 0x3f {
|
||||||
|
c1 += 0x40
|
||||||
|
} else {
|
||||||
|
c1 += 0x62
|
||||||
|
}
|
||||||
|
reverse[y] = (0x81+c0)<<8 | c1
|
||||||
|
}
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
log.Fatalf("scanner error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("// decode is the decoding table from Big5 code to Unicode.\n")
|
||||||
|
fmt.Printf("// It is defined at http://encoding.spec.whatwg.org/index-big5.txt\n")
|
||||||
|
fmt.Printf("var decode = [...]uint32{\n")
|
||||||
|
for i, v := range mapping {
|
||||||
|
if v != 0 {
|
||||||
|
fmt.Printf("\t%d: 0x%08X,\n", i, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
|
||||||
|
// Any run of at least separation continuous zero entries in the reverse map will
|
||||||
|
// be a separate encode table.
|
||||||
|
const separation = 1024
|
||||||
|
|
||||||
|
intervals := []interval(nil)
|
||||||
|
low, high := -1, -1
|
||||||
|
for i, v := range reverse {
|
||||||
|
if v == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if low < 0 {
|
||||||
|
low = i
|
||||||
|
} else if i-high >= separation {
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
low = i
|
||||||
|
}
|
||||||
|
high = i + 1
|
||||||
|
}
|
||||||
|
if high >= 0 {
|
||||||
|
intervals = append(intervals, interval{low, high})
|
||||||
|
}
|
||||||
|
sort.Sort(byDecreasingLength(intervals))
|
||||||
|
|
||||||
|
fmt.Printf("const numEncodeTables = %d\n\n", len(intervals))
|
||||||
|
fmt.Printf("// encodeX are the encoding tables from Unicode to Big5 code,\n")
|
||||||
|
fmt.Printf("// sorted by decreasing length.\n")
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("// encode%d: %5d entries for runes in [%6d, %6d).\n", i, v.len(), v.low, v.high)
|
||||||
|
}
|
||||||
|
fmt.Printf("\n")
|
||||||
|
|
||||||
|
for i, v := range intervals {
|
||||||
|
fmt.Printf("const encode%dLow, encode%dHigh = %d, %d\n\n", i, i, v.low, v.high)
|
||||||
|
fmt.Printf("var encode%d = [...]uint16{\n", i)
|
||||||
|
for j := v.low; j < v.high; j++ {
|
||||||
|
x := reverse[j]
|
||||||
|
if x == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("\t%d-%d: 0x%04X,\n", j, v.low, x)
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// interval is a half-open interval [low, high).
|
||||||
|
type interval struct {
|
||||||
|
low, high int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i interval) len() int { return i.high - i.low }
|
||||||
|
|
||||||
|
// byDecreasingLength sorts intervals by decreasing length.
|
||||||
|
type byDecreasingLength []interval
|
||||||
|
|
||||||
|
func (b byDecreasingLength) Len() int { return len(b) }
|
||||||
|
func (b byDecreasingLength) Less(i, j int) bool { return b[i].len() > b[j].len() }
|
||||||
|
func (b byDecreasingLength) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
|
@@ -0,0 +1,64 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// Language tag table generator.
|
||||||
|
// Data read from the web.
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"golang.org/x/text/internal/gen"
|
||||||
|
"golang.org/x/text/unicode/cldr"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
test = flag.Bool("test",
|
||||||
|
false,
|
||||||
|
"test existing tables; can be used to compare web data with package data.")
|
||||||
|
outputFile = flag.String("output",
|
||||||
|
"tables.go",
|
||||||
|
"output file for generated tables")
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
gen.Init()
|
||||||
|
|
||||||
|
w := gen.NewCodeWriter()
|
||||||
|
defer w.WriteGoFile("tables.go", "compact")
|
||||||
|
|
||||||
|
fmt.Fprintln(w, `import "golang.org/x/text/internal/language"`)
|
||||||
|
|
||||||
|
b := newBuilder(w)
|
||||||
|
gen.WriteCLDRVersion(w)
|
||||||
|
|
||||||
|
b.writeCompactIndex()
|
||||||
|
}
|
||||||
|
|
||||||
|
type builder struct {
|
||||||
|
w *gen.CodeWriter
|
||||||
|
data *cldr.CLDR
|
||||||
|
supp *cldr.SupplementalData
|
||||||
|
}
|
||||||
|
|
||||||
|
func newBuilder(w *gen.CodeWriter) *builder {
|
||||||
|
r := gen.OpenCLDRCoreZip()
|
||||||
|
defer r.Close()
|
||||||
|
d := &cldr.Decoder{}
|
||||||
|
data, err := d.DecodeZip(r)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
b := builder{
|
||||||
|
w: w,
|
||||||
|
data: data,
|
||||||
|
supp: data.Supplemental(),
|
||||||
|
}
|
||||||
|
return &b
|
||||||
|
}
|
|
@@ -0,0 +1,113 @@
|
||||||
|
// Copyright 2015 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
// This file generates derivative tables based on the language package itself.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/text/internal/language"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Compact indices:
|
||||||
|
// Note -va-X variants only apply to localization variants.
|
||||||
|
// BCP variants only ever apply to language.
|
||||||
|
// The only ambiguity between tags is with regions.
|
||||||
|
|
||||||
|
func (b *builder) writeCompactIndex() {
|
||||||
|
// Collect all language tags for which we have any data in CLDR.
|
||||||
|
m := map[language.Tag]bool{}
|
||||||
|
for _, lang := range b.data.Locales() {
|
||||||
|
// We include all locales unconditionally to be consistent with en_US.
|
||||||
|
// We want en_US, even though it has no data associated with it.
|
||||||
|
|
||||||
|
// TODO: put any of the languages for which no data exists at the end
|
||||||
|
// of the index. This allows all components based on ICU to use that
|
||||||
|
// as the cutoff point.
|
||||||
|
// if x := data.RawLDML(lang); false ||
|
||||||
|
// x.LocaleDisplayNames != nil ||
|
||||||
|
// x.Characters != nil ||
|
||||||
|
// x.Delimiters != nil ||
|
||||||
|
// x.Measurement != nil ||
|
||||||
|
// x.Dates != nil ||
|
||||||
|
// x.Numbers != nil ||
|
||||||
|
// x.Units != nil ||
|
||||||
|
// x.ListPatterns != nil ||
|
||||||
|
// x.Collations != nil ||
|
||||||
|
// x.Segmentations != nil ||
|
||||||
|
// x.Rbnf != nil ||
|
||||||
|
// x.Annotations != nil ||
|
||||||
|
// x.Metadata != nil {
|
||||||
|
|
||||||
|
// TODO: support POSIX natively, albeit non-standard.
|
||||||
|
tag := language.Make(strings.Replace(lang, "_POSIX", "-u-va-posix", 1))
|
||||||
|
m[tag] = true
|
||||||
|
// }
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: plural rules are also defined for the deprecated tags:
|
||||||
|
// iw mo sh tl
|
||||||
|
// Consider removing these as compact tags.
|
||||||
|
|
||||||
|
// Include locales for plural rules, which uses a different structure.
|
||||||
|
for _, plurals := range b.supp.Plurals {
|
||||||
|
for _, rules := range plurals.PluralRules {
|
||||||
|
for _, lang := range strings.Split(rules.Locales, " ") {
|
||||||
|
m[language.Make(lang)] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var coreTags []language.CompactCoreInfo
|
||||||
|
var special []string
|
||||||
|
|
||||||
|
for t := range m {
|
||||||
|
if x := t.Extensions(); len(x) != 0 && fmt.Sprint(x) != "[u-va-posix]" {
|
||||||
|
log.Fatalf("Unexpected extension %v in %v", x, t)
|
||||||
|
}
|
||||||
|
if len(t.Variants()) == 0 && len(t.Extensions()) == 0 {
|
||||||
|
cci, ok := language.GetCompactCore(t)
|
||||||
|
if !ok {
|
||||||
|
log.Fatalf("Locale for non-basic language %q", t)
|
||||||
|
}
|
||||||
|
coreTags = append(coreTags, cci)
|
||||||
|
} else {
|
||||||
|
special = append(special, t.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
w := b.w
|
||||||
|
|
||||||
|
sort.Slice(coreTags, func(i, j int) bool { return coreTags[i] < coreTags[j] })
|
||||||
|
sort.Strings(special)
|
||||||
|
|
||||||
|
w.WriteComment(`
|
||||||
|
NumCompactTags is the number of common tags. The maximum tag is
|
||||||
|
NumCompactTags-1.`)
|
||||||
|
w.WriteConst("NumCompactTags", len(m))
|
||||||
|
|
||||||
|
fmt.Fprintln(w, "const (")
|
||||||
|
for i, t := range coreTags {
|
||||||
|
fmt.Fprintf(w, "%s ID = %d\n", ident(t.Tag().String()), i)
|
||||||
|
}
|
||||||
|
for i, t := range special {
|
||||||
|
fmt.Fprintf(w, "%s ID = %d\n", ident(t), i+len(coreTags))
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, ")")
|
||||||
|
|
||||||
|
w.WriteVar("coreTags", coreTags)
|
||||||
|
|
||||||
|
w.WriteConst("specialTagsStr", strings.Join(special, " "))
|
||||||
|
}
|
||||||
|
|
||||||
|
func ident(s string) string {
|
||||||
|
return strings.Replace(s, "-", "", -1) + "Index"
|
||||||
|
}
|
|
@@ -0,0 +1,54 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"golang.org/x/text/internal/gen"
|
||||||
|
"golang.org/x/text/internal/language"
|
||||||
|
"golang.org/x/text/internal/language/compact"
|
||||||
|
"golang.org/x/text/unicode/cldr"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
r := gen.OpenCLDRCoreZip()
|
||||||
|
defer r.Close()
|
||||||
|
|
||||||
|
d := &cldr.Decoder{}
|
||||||
|
data, err := d.DecodeZip(r)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("DecodeZip: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
w := gen.NewCodeWriter()
|
||||||
|
defer w.WriteGoFile("parents.go", "compact")
|
||||||
|
|
||||||
|
// Create parents table.
|
||||||
|
type ID uint16
|
||||||
|
parents := make([]ID, compact.NumCompactTags)
|
||||||
|
for _, loc := range data.Locales() {
|
||||||
|
tag := language.MustParse(loc)
|
||||||
|
index, ok := compact.FromTag(tag)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
parentIndex := compact.ID(0) // und
|
||||||
|
for p := tag.Parent(); p != language.Und; p = p.Parent() {
|
||||||
|
if x, ok := compact.FromTag(p); ok {
|
||||||
|
parentIndex = x
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
parents[index] = ID(parentIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
w.WriteComment(`
|
||||||
|
parents maps a compact index of a tag to the compact index of the parent of
|
||||||
|
this tag.`)
|
||||||
|
w.WriteVar("parents", parents)
|
||||||
|
}
|
File diff suppressed because it is too large
|
@@ -0,0 +1,20 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

// This file contains code common to the maketables.go and the package code.

// AliasType is the type of an alias in AliasMap.
type AliasType int8

const (
    Deprecated AliasType = iota
    Macro
    Legacy

    AliasTypeUnknown AliasType = -1
)
@ -0,0 +1,305 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Language tag table generator.
// Data read from the web.

package main

import (
    "flag"
    "fmt"
    "io"
    "log"
    "sort"
    "strconv"
    "strings"

    "golang.org/x/text/internal/gen"
    "golang.org/x/text/internal/language"
    "golang.org/x/text/unicode/cldr"
)

var (
    test = flag.Bool("test",
        false,
        "test existing tables; can be used to compare web data with package data.")
    outputFile = flag.String("output",
        "tables.go",
        "output file for generated tables")
)

func main() {
    gen.Init()

    w := gen.NewCodeWriter()
    defer w.WriteGoFile("tables.go", "language")

    b := newBuilder(w)
    gen.WriteCLDRVersion(w)

    b.writeConstants()
    b.writeMatchData()
}

type builder struct {
|
||||||
|
w *gen.CodeWriter
|
||||||
|
hw io.Writer // MultiWriter for w and w.Hash
|
||||||
|
data *cldr.CLDR
|
||||||
|
supp *cldr.SupplementalData
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) langIndex(s string) uint16 {
|
||||||
|
return uint16(language.MustParseBase(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) regionIndex(s string) int {
|
||||||
|
return int(language.MustParseRegion(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) scriptIndex(s string) int {
|
||||||
|
return int(language.MustParseScript(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
func newBuilder(w *gen.CodeWriter) *builder {
|
||||||
|
r := gen.OpenCLDRCoreZip()
|
||||||
|
defer r.Close()
|
||||||
|
d := &cldr.Decoder{}
|
||||||
|
data, err := d.DecodeZip(r)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
b := builder{
|
||||||
|
w: w,
|
||||||
|
hw: io.MultiWriter(w, w.Hash),
|
||||||
|
data: data,
|
||||||
|
supp: data.Supplemental(),
|
||||||
|
}
|
||||||
|
return &b
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeConsts computes f(v) for all v in values and writes the results
|
||||||
|
// as constants named _v to a single constant block.
|
||||||
|
func (b *builder) writeConsts(f func(string) int, values ...string) {
|
||||||
|
fmt.Fprintln(b.w, "const (")
|
||||||
|
for _, v := range values {
|
||||||
|
fmt.Fprintf(b.w, "\t_%s = %v\n", v, f(v))
|
||||||
|
}
|
||||||
|
fmt.Fprintln(b.w, ")")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: region inclusion data will probably not be use used in future matchers.
|
||||||
|
|
||||||
|
var langConsts = []string{
|
||||||
|
"de", "en", "fr", "it", "mo", "no", "nb", "pt", "sh", "mul", "und",
|
||||||
|
}
|
||||||
|
|
||||||
|
var scriptConsts = []string{
|
||||||
|
"Latn", "Hani", "Hans", "Hant", "Qaaa", "Qaai", "Qabx", "Zinh", "Zyyy",
|
||||||
|
"Zzzz",
|
||||||
|
}
|
||||||
|
|
||||||
|
var regionConsts = []string{
|
||||||
|
"001", "419", "BR", "CA", "ES", "GB", "MD", "PT", "UK", "US",
|
||||||
|
"ZZ", "XA", "XC", "XK", // Unofficial tag for Kosovo.
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) writeConstants() {
|
||||||
|
b.writeConsts(func(s string) int { return int(b.langIndex(s)) }, langConsts...)
|
||||||
|
b.writeConsts(b.regionIndex, regionConsts...)
|
||||||
|
b.writeConsts(b.scriptIndex, scriptConsts...)
|
||||||
|
}
|
||||||
|
|
||||||
|
type mutualIntelligibility struct {
|
||||||
|
want, have uint16
|
||||||
|
distance uint8
|
||||||
|
oneway bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type scriptIntelligibility struct {
|
||||||
|
wantLang, haveLang uint16
|
||||||
|
wantScript, haveScript uint8
|
||||||
|
distance uint8
|
||||||
|
// Always oneway
|
||||||
|
}
|
||||||
|
|
||||||
|
type regionIntelligibility struct {
|
||||||
|
lang uint16 // compact language id
|
||||||
|
script uint8 // 0 means any
|
||||||
|
group uint8 // 0 means any; if bit 7 is set it means inverse
|
||||||
|
distance uint8
|
||||||
|
// Always twoway.
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeMatchData writes tables with languages and scripts for which there is
|
||||||
|
// mutual intelligibility. The data is based on CLDR's languageMatching data.
|
||||||
|
// Note that we use a different algorithm than the one defined by CLDR and that
|
||||||
|
// we slightly modify the data. For example, we convert scores to confidence levels.
|
||||||
|
// We also drop all region-related data as we use a different algorithm to
|
||||||
|
// determine region equivalence.
|
||||||
|
func (b *builder) writeMatchData() {
|
||||||
|
lm := b.supp.LanguageMatching.LanguageMatches
|
||||||
|
cldr.MakeSlice(&lm).SelectAnyOf("type", "written_new")
|
||||||
|
|
||||||
|
regionHierarchy := map[string][]string{}
|
||||||
|
for _, g := range b.supp.TerritoryContainment.Group {
|
||||||
|
regions := strings.Split(g.Contains, " ")
|
||||||
|
regionHierarchy[g.Type] = append(regionHierarchy[g.Type], regions...)
|
||||||
|
}
|
||||||
|
regionToGroups := make([]uint8, language.NumRegions)
|
||||||
|
|
||||||
|
idToIndex := map[string]uint8{}
|
||||||
|
for i, mv := range lm[0].MatchVariable {
|
||||||
|
if i > 6 {
|
||||||
|
log.Fatalf("Too many groups: %d", i)
|
||||||
|
}
|
||||||
|
idToIndex[mv.Id] = uint8(i + 1)
|
||||||
|
// TODO: also handle '-'
|
||||||
|
for _, r := range strings.Split(mv.Value, "+") {
|
||||||
|
todo := []string{r}
|
||||||
|
for k := 0; k < len(todo); k++ {
|
||||||
|
r := todo[k]
|
||||||
|
regionToGroups[b.regionIndex(r)] |= 1 << uint8(i)
|
||||||
|
todo = append(todo, regionHierarchy[r]...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
b.w.WriteVar("regionToGroups", regionToGroups)
|
||||||
|
|
||||||
|
// maps language id to in- and out-of-group region.
|
||||||
|
paradigmLocales := [][3]uint16{}
|
||||||
|
locales := strings.Split(lm[0].ParadigmLocales[0].Locales, " ")
|
||||||
|
for i := 0; i < len(locales); i += 2 {
|
||||||
|
x := [3]uint16{}
|
||||||
|
for j := 0; j < 2; j++ {
|
||||||
|
pc := strings.SplitN(locales[i+j], "-", 2)
|
||||||
|
x[0] = b.langIndex(pc[0])
|
||||||
|
if len(pc) == 2 {
|
||||||
|
x[1+j] = uint16(b.regionIndex(pc[1]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
paradigmLocales = append(paradigmLocales, x)
|
||||||
|
}
|
||||||
|
b.w.WriteVar("paradigmLocales", paradigmLocales)
|
||||||
|
|
||||||
|
b.w.WriteType(mutualIntelligibility{})
|
||||||
|
b.w.WriteType(scriptIntelligibility{})
|
||||||
|
b.w.WriteType(regionIntelligibility{})
|
||||||
|
|
||||||
|
matchLang := []mutualIntelligibility{}
|
||||||
|
matchScript := []scriptIntelligibility{}
|
||||||
|
matchRegion := []regionIntelligibility{}
|
||||||
|
// Convert the languageMatch entries in lists keyed by desired language.
|
||||||
|
for _, m := range lm[0].LanguageMatch {
|
||||||
|
// Different versions of CLDR use different separators.
|
||||||
|
desired := strings.Replace(m.Desired, "-", "_", -1)
|
||||||
|
supported := strings.Replace(m.Supported, "-", "_", -1)
|
||||||
|
d := strings.Split(desired, "_")
|
||||||
|
s := strings.Split(supported, "_")
|
||||||
|
if len(d) != len(s) {
|
||||||
|
log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
distance, _ := strconv.ParseInt(m.Distance, 10, 8)
|
||||||
|
switch len(d) {
|
||||||
|
case 2:
|
||||||
|
if desired == supported && desired == "*_*" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// language-script pair.
|
||||||
|
matchScript = append(matchScript, scriptIntelligibility{
|
||||||
|
wantLang: uint16(b.langIndex(d[0])),
|
||||||
|
haveLang: uint16(b.langIndex(s[0])),
|
||||||
|
wantScript: uint8(b.scriptIndex(d[1])),
|
||||||
|
haveScript: uint8(b.scriptIndex(s[1])),
|
||||||
|
distance: uint8(distance),
|
||||||
|
})
|
||||||
|
if m.Oneway != "true" {
|
||||||
|
matchScript = append(matchScript, scriptIntelligibility{
|
||||||
|
wantLang: uint16(b.langIndex(s[0])),
|
||||||
|
haveLang: uint16(b.langIndex(d[0])),
|
||||||
|
wantScript: uint8(b.scriptIndex(s[1])),
|
||||||
|
haveScript: uint8(b.scriptIndex(d[1])),
|
||||||
|
distance: uint8(distance),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
case 1:
|
||||||
|
if desired == supported && desired == "*" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if distance == 1 {
|
||||||
|
// nb == no is already handled by macro mapping. Check there
|
||||||
|
// really is only this case.
|
||||||
|
if d[0] != "no" || s[0] != "nb" {
|
||||||
|
log.Fatalf("unhandled equivalence %s == %s", s[0], d[0])
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// TODO: consider dropping oneway field and just doubling the entry.
|
||||||
|
matchLang = append(matchLang, mutualIntelligibility{
|
||||||
|
want: uint16(b.langIndex(d[0])),
|
||||||
|
have: uint16(b.langIndex(s[0])),
|
||||||
|
distance: uint8(distance),
|
||||||
|
oneway: m.Oneway == "true",
|
||||||
|
})
|
||||||
|
case 3:
|
||||||
|
if desired == supported && desired == "*_*_*" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if desired != supported {
|
||||||
|
// This is now supported by CLDR, but only one case, which
|
||||||
|
// should already be covered by paradigm locales. For instance,
|
||||||
|
// test case "und, en, en-GU, en-IN, en-GB ; en-ZA ; en-GB" in
|
||||||
|
// testdata/CLDRLocaleMatcherTest.txt tests this.
|
||||||
|
if supported != "en_*_GB" {
|
||||||
|
log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ri := regionIntelligibility{
|
||||||
|
lang: b.langIndex(d[0]),
|
||||||
|
distance: uint8(distance),
|
||||||
|
}
|
||||||
|
if d[1] != "*" {
|
||||||
|
ri.script = uint8(b.scriptIndex(d[1]))
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case d[2] == "*":
|
||||||
|
ri.group = 0x80 // not contained in anything
|
||||||
|
case strings.HasPrefix(d[2], "$!"):
|
||||||
|
ri.group = 0x80
|
||||||
|
d[2] = "$" + d[2][len("$!"):]
|
||||||
|
fallthrough
|
||||||
|
case strings.HasPrefix(d[2], "$"):
|
||||||
|
ri.group |= idToIndex[d[2]]
|
||||||
|
}
|
||||||
|
matchRegion = append(matchRegion, ri)
|
||||||
|
default:
|
||||||
|
log.Fatalf("not supported: desired=%q; supported=%q", desired, supported)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sort.SliceStable(matchLang, func(i, j int) bool {
|
||||||
|
return matchLang[i].distance < matchLang[j].distance
|
||||||
|
})
|
||||||
|
b.w.WriteComment(`
|
||||||
|
matchLang holds pairs of langIDs of base languages that are typically
|
||||||
|
mutually intelligible. Each pair is associated with a confidence and
|
||||||
|
whether the intelligibility goes one or both ways.`)
|
||||||
|
b.w.WriteVar("matchLang", matchLang)
|
||||||
|
|
||||||
|
b.w.WriteComment(`
|
||||||
|
matchScript holds pairs of scriptIDs where readers of one script
|
||||||
|
can typically also read the other. Each is associated with a confidence.`)
|
||||||
|
sort.SliceStable(matchScript, func(i, j int) bool {
|
||||||
|
return matchScript[i].distance < matchScript[j].distance
|
||||||
|
})
|
||||||
|
b.w.WriteVar("matchScript", matchScript)
|
||||||
|
|
||||||
|
sort.SliceStable(matchRegion, func(i, j int) bool {
|
||||||
|
return matchRegion[i].distance < matchRegion[j].distance
|
||||||
|
})
|
||||||
|
b.w.WriteVar("matchRegion", matchRegion)
|
||||||
|
}
|
|
@ -0,0 +1,133 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

import (
    "flag"
    "log"

    "golang.org/x/text/internal/gen"
    "golang.org/x/text/internal/triegen"
    "golang.org/x/text/internal/ucd"
)

var outputFile = flag.String("out", "tables.go", "output file")

func main() {
    gen.Init()
    gen.Repackage("gen_trieval.go", "trieval.go", "bidi")
    gen.Repackage("gen_ranges.go", "ranges_test.go", "bidi")

    genTables()
}

// bidiClass names and codes taken from class "bc" in
// https://www.unicode.org/Public/8.0.0/ucd/PropertyValueAliases.txt
var bidiClass = map[string]Class{
    "AL":  AL,  // ArabicLetter
    "AN":  AN,  // ArabicNumber
    "B":   B,   // ParagraphSeparator
    "BN":  BN,  // BoundaryNeutral
    "CS":  CS,  // CommonSeparator
    "EN":  EN,  // EuropeanNumber
    "ES":  ES,  // EuropeanSeparator
    "ET":  ET,  // EuropeanTerminator
    "L":   L,   // LeftToRight
    "NSM": NSM, // NonspacingMark
    "ON":  ON,  // OtherNeutral
    "R":   R,   // RightToLeft
    "S":   S,   // SegmentSeparator
    "WS":  WS,  // WhiteSpace

    "FSI": Control,
    "PDF": Control,
    "PDI": Control,
    "LRE": Control,
    "LRI": Control,
    "LRO": Control,
    "RLE": Control,
    "RLI": Control,
    "RLO": Control,
}

func genTables() {
    if numClass > 0x0F {
        log.Fatalf("Too many Class constants (%#x > 0x0F).", numClass)
    }
    w := gen.NewCodeWriter()
    defer w.WriteVersionedGoFile(*outputFile, "bidi")

    gen.WriteUnicodeVersion(w)

    t := triegen.NewTrie("bidi")

    // Build data about bracket mapping. These bits need to be or-ed with
    // any other bits.
    orMask := map[rune]uint64{}

    xorMap := map[rune]int{}
    xorMasks := []rune{0} // First value is no-op.

    ucd.Parse(gen.OpenUCDFile("BidiBrackets.txt"), func(p *ucd.Parser) {
        r1 := p.Rune(0)
        r2 := p.Rune(1)
        xor := r1 ^ r2
        if _, ok := xorMap[xor]; !ok {
            xorMap[xor] = len(xorMasks)
            xorMasks = append(xorMasks, xor)
        }
        entry := uint64(xorMap[xor]) << xorMaskShift
        switch p.String(2) {
        case "o":
            entry |= openMask
        case "c", "n":
        default:
            log.Fatalf("Unknown bracket class %q.", p.String(2))
        }
        orMask[r1] = entry
    })

    w.WriteComment(`
    xorMasks contains masks to be xor-ed with brackets to get the reverse
    version.`)
    w.WriteVar("xorMasks", xorMasks)

    done := map[rune]bool{}

    insert := func(r rune, c Class) {
        if !done[r] {
            t.Insert(r, orMask[r]|uint64(c))
            done[r] = true
        }
    }

    // Insert the derived BiDi properties.
    ucd.Parse(gen.OpenUCDFile("extracted/DerivedBidiClass.txt"), func(p *ucd.Parser) {
        r := p.Rune(0)
        class, ok := bidiClass[p.String(1)]
        if !ok {
            log.Fatalf("%U: Unknown BiDi class %q", r, p.String(1))
        }
        insert(r, class)
    })
    visitDefaults(insert)

    // TODO: use sparse blocks. This would reduce table size considerably
    // from the looks of it.

    sz, err := t.Gen(w)
    if err != nil {
        log.Fatal(err)
    }
    w.Size += sz
}

// dummy values to make methods in gen_common compile. The real versions
// will be generated by this file to tables.go.
var (
    xorMasks []rune
)
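A small aside (my own illustration, not code from the commit): the xorMasks trick works because a closing bracket differs from its opening partner by only a few bits, so storing the XOR of the pair is enough to recover one bracket from the other. For example, '(' (U+0028) and ')' (U+0029) differ only in the lowest bit:

package main

import "fmt"

func main() {
    open, cl := rune('('), rune(')')
    xor := open ^ cl // 0x0028 ^ 0x0029 == 0x0001
    fmt.Printf("%#x\n", xor)    // 0x1
    fmt.Printf("%q\n", open^xor) // ')' — applying the mask flips the bracket
    fmt.Printf("%q\n", cl^xor)   // '(' — and flips it back
}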
@ -0,0 +1,57 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

import (
    "unicode"

    "golang.org/x/text/internal/gen"
    "golang.org/x/text/internal/ucd"
    "golang.org/x/text/unicode/rangetable"
)

// These tables are hand-extracted from:
// https://www.unicode.org/Public/8.0.0/ucd/extracted/DerivedBidiClass.txt
func visitDefaults(fn func(r rune, c Class)) {
    // first write default values for ranges listed above.
    visitRunes(fn, AL, []rune{
        0x0600, 0x07BF, // Arabic
        0x08A0, 0x08FF, // Arabic Extended-A
        0xFB50, 0xFDCF, // Arabic Presentation Forms
        0xFDF0, 0xFDFF,
        0xFE70, 0xFEFF,
        0x0001EE00, 0x0001EEFF, // Arabic Mathematical Alpha Symbols
    })
    visitRunes(fn, R, []rune{
        0x0590, 0x05FF, // Hebrew
        0x07C0, 0x089F, // Nko et al.
        0xFB1D, 0xFB4F,
        0x00010800, 0x00010FFF, // Cypriot Syllabary et. al.
        0x0001E800, 0x0001EDFF,
        0x0001EF00, 0x0001EFFF,
    })
    visitRunes(fn, ET, []rune{ // European Terminator
        0x20A0, 0x20Cf, // Currency symbols
    })
    rangetable.Visit(unicode.Noncharacter_Code_Point, func(r rune) {
        fn(r, BN) // Boundary Neutral
    })
    ucd.Parse(gen.OpenUCDFile("DerivedCoreProperties.txt"), func(p *ucd.Parser) {
        if p.String(1) == "Default_Ignorable_Code_Point" {
            fn(p.Rune(0), BN) // Boundary Neutral
        }
    })
}

func visitRunes(fn func(r rune, c Class), c Class, runes []rune) {
    for i := 0; i < len(runes); i += 2 {
        lo, hi := runes[i], runes[i+1]
        for j := lo; j <= hi; j++ {
            fn(j, c)
        }
    }
}
@ -0,0 +1,64 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

package main

// Class is the Unicode BiDi class. Each rune has a single class.
type Class uint

const (
    L       Class = iota // LeftToRight
    R                    // RightToLeft
    EN                   // EuropeanNumber
    ES                   // EuropeanSeparator
    ET                   // EuropeanTerminator
    AN                   // ArabicNumber
    CS                   // CommonSeparator
    B                    // ParagraphSeparator
    S                    // SegmentSeparator
    WS                   // WhiteSpace
    ON                   // OtherNeutral
    BN                   // BoundaryNeutral
    NSM                  // NonspacingMark
    AL                   // ArabicLetter
    Control              // Control LRO - PDI

    numClass

    LRO // LeftToRightOverride
    RLO // RightToLeftOverride
    LRE // LeftToRightEmbedding
    RLE // RightToLeftEmbedding
    PDF // PopDirectionalFormat
    LRI // LeftToRightIsolate
    RLI // RightToLeftIsolate
    FSI // FirstStrongIsolate
    PDI // PopDirectionalIsolate

    unknownClass = ^Class(0)
)

var controlToClass = map[rune]Class{
    0x202D: LRO, // LeftToRightOverride,
    0x202E: RLO, // RightToLeftOverride,
    0x202A: LRE, // LeftToRightEmbedding,
    0x202B: RLE, // RightToLeftEmbedding,
    0x202C: PDF, // PopDirectionalFormat,
    0x2066: LRI, // LeftToRightIsolate,
    0x2067: RLI, // RightToLeftIsolate,
    0x2068: FSI, // FirstStrongIsolate,
    0x2069: PDI, // PopDirectionalIsolate,
}

// A trie entry has the following bits:
// 7..5  XOR mask for brackets
// 4     1: Bracket open, 0: Bracket close
// 3..0  Class type

const (
    openMask     = 0x10
    xorMaskShift = 5
)
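To make the entry layout above concrete, here is a small decoding sketch (my own illustration, not part of the vendored file); it reuses the same openMask and xorMaskShift values and assumes a uint8 entry:

package main

import "fmt"

const (
    openMask     = 0x10 // bit 4: bracket open
    xorMaskShift = 5    // bits 7..5: index into xorMasks
)

// decode splits a trie entry into its class bits, the bracket-open flag and
// the xorMasks index, following the bit layout documented above.
func decode(entry uint8) (class uint8, open bool, xorIdx uint8) {
    class = entry & 0x0F
    open = entry&openMask != 0
    xorIdx = entry >> xorMaskShift
    return class, open, xorIdx
}

func main() {
    // Hypothetical entry: class 3, bracket open, xorMasks index 2.
    entry := uint8(2<<xorMaskShift | openMask | 3)
    fmt.Println(decode(entry)) // 3 true 2
}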
@ -0,0 +1,986 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Normalization table generator.
// Data read from the web.
// See forminfo.go for a description of the trie values associated with each rune.

package main

import (
    "bytes"
    "encoding/binary"
    "flag"
    "fmt"
    "io"
    "log"
    "sort"
    "strconv"
    "strings"

    "golang.org/x/text/internal/gen"
    "golang.org/x/text/internal/triegen"
    "golang.org/x/text/internal/ucd"
)

func main() {
    gen.Init()
    loadUnicodeData()
    compactCCC()
    loadCompositionExclusions()
    completeCharFields(FCanonical)
    completeCharFields(FCompatibility)
    computeNonStarterCounts()
    verifyComputed()
    printChars()
    testDerived()
    printTestdata()
    makeTables()
}

var (
|
||||||
|
tablelist = flag.String("tables",
|
||||||
|
"all",
|
||||||
|
"comma-separated list of which tables to generate; "+
|
||||||
|
"can be 'decomp', 'recomp', 'info' and 'all'")
|
||||||
|
test = flag.Bool("test",
|
||||||
|
false,
|
||||||
|
"test existing tables against DerivedNormalizationProps and generate test data for regression testing")
|
||||||
|
verbose = flag.Bool("verbose",
|
||||||
|
false,
|
||||||
|
"write data to stdout as it is parsed")
|
||||||
|
)
|
||||||
|
|
||||||
|
const MaxChar = 0x10FFFF // anything above this shouldn't exist
|
||||||
|
|
||||||
|
// Quick Check properties of runes allow us to quickly
|
||||||
|
// determine whether a rune may occur in a normal form.
|
||||||
|
// For a given normal form, a rune may be guaranteed to occur
|
||||||
|
// verbatim (QC=Yes), may or may not combine with another
|
||||||
|
// rune (QC=Maybe), or may not occur (QC=No).
|
||||||
|
type QCResult int
|
||||||
|
|
||||||
|
const (
|
||||||
|
QCUnknown QCResult = iota
|
||||||
|
QCYes
|
||||||
|
QCNo
|
||||||
|
QCMaybe
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r QCResult) String() string {
|
||||||
|
switch r {
|
||||||
|
case QCYes:
|
||||||
|
return "Yes"
|
||||||
|
case QCNo:
|
||||||
|
return "No"
|
||||||
|
case QCMaybe:
|
||||||
|
return "Maybe"
|
||||||
|
}
|
||||||
|
return "***UNKNOWN***"
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
FCanonical = iota // NFC or NFD
|
||||||
|
FCompatibility // NFKC or NFKD
|
||||||
|
FNumberOfFormTypes
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
MComposed = iota // NFC or NFKC
|
||||||
|
MDecomposed // NFD or NFKD
|
||||||
|
MNumberOfModes
|
||||||
|
)
|
||||||
|
|
||||||
|
// This contains only the properties we're interested in.
|
||||||
|
type Char struct {
|
||||||
|
name string
|
||||||
|
codePoint rune // if zero, this index is not a valid code point.
|
||||||
|
ccc uint8 // canonical combining class
|
||||||
|
origCCC uint8
|
||||||
|
excludeInComp bool // from CompositionExclusions.txt
|
||||||
|
compatDecomp bool // it has a compatibility expansion
|
||||||
|
|
||||||
|
nTrailingNonStarters uint8
|
||||||
|
nLeadingNonStarters uint8 // must be equal to trailing if non-zero
|
||||||
|
|
||||||
|
forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility
|
||||||
|
|
||||||
|
state State
|
||||||
|
}
|
||||||
|
|
||||||
|
var chars = make([]Char, MaxChar+1)
|
||||||
|
var cccMap = make(map[uint8]uint8)
|
||||||
|
|
||||||
|
func (c Char) String() string {
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
|
||||||
|
fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name)
|
||||||
|
fmt.Fprintf(buf, " ccc: %v\n", c.ccc)
|
||||||
|
fmt.Fprintf(buf, " excludeInComp: %v\n", c.excludeInComp)
|
||||||
|
fmt.Fprintf(buf, " compatDecomp: %v\n", c.compatDecomp)
|
||||||
|
fmt.Fprintf(buf, " state: %v\n", c.state)
|
||||||
|
fmt.Fprintf(buf, " NFC:\n")
|
||||||
|
fmt.Fprint(buf, c.forms[FCanonical])
|
||||||
|
fmt.Fprintf(buf, " NFKC:\n")
|
||||||
|
fmt.Fprint(buf, c.forms[FCompatibility])
|
||||||
|
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// In UnicodeData.txt, some ranges are marked like this:
|
||||||
|
// 3400;<CJK Ideograph Extension A, First>;Lo;0;L;;;;;N;;;;;
|
||||||
|
// 4DB5;<CJK Ideograph Extension A, Last>;Lo;0;L;;;;;N;;;;;
|
||||||
|
// parseCharacter keeps a state variable indicating the weirdness.
|
||||||
|
type State int
|
||||||
|
|
||||||
|
const (
|
||||||
|
SNormal State = iota // known to be zero for the type
|
||||||
|
SFirst
|
||||||
|
SLast
|
||||||
|
SMissing
|
||||||
|
)
|
||||||
|
|
||||||
|
var lastChar = rune('\u0000')
|
||||||
|
|
||||||
|
func (c Char) isValid() bool {
|
||||||
|
return c.codePoint != 0 && c.state != SMissing
|
||||||
|
}
|
||||||
|
|
||||||
|
type FormInfo struct {
|
||||||
|
quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed
|
||||||
|
verified [MNumberOfModes]bool // index: MComposed or MDecomposed
|
||||||
|
|
||||||
|
combinesForward bool // May combine with rune on the right
|
||||||
|
combinesBackward bool // May combine with rune on the left
|
||||||
|
isOneWay bool // Never appears in result
|
||||||
|
inDecomp bool // Some decompositions result in this char.
|
||||||
|
decomp Decomposition
|
||||||
|
expandedDecomp Decomposition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f FormInfo) String() string {
|
||||||
|
buf := bytes.NewBuffer(make([]byte, 0))
|
||||||
|
|
||||||
|
fmt.Fprintf(buf, " quickCheck[C]: %v\n", f.quickCheck[MComposed])
|
||||||
|
fmt.Fprintf(buf, " quickCheck[D]: %v\n", f.quickCheck[MDecomposed])
|
||||||
|
fmt.Fprintf(buf, " cmbForward: %v\n", f.combinesForward)
|
||||||
|
fmt.Fprintf(buf, " cmbBackward: %v\n", f.combinesBackward)
|
||||||
|
fmt.Fprintf(buf, " isOneWay: %v\n", f.isOneWay)
|
||||||
|
fmt.Fprintf(buf, " inDecomp: %v\n", f.inDecomp)
|
||||||
|
fmt.Fprintf(buf, " decomposition: %X\n", f.decomp)
|
||||||
|
fmt.Fprintf(buf, " expandedDecomp: %X\n", f.expandedDecomp)
|
||||||
|
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
type Decomposition []rune
|
||||||
|
|
||||||
|
func parseDecomposition(s string, skipfirst bool) (a []rune, err error) {
|
||||||
|
decomp := strings.Split(s, " ")
|
||||||
|
if len(decomp) > 0 && skipfirst {
|
||||||
|
decomp = decomp[1:]
|
||||||
|
}
|
||||||
|
for _, d := range decomp {
|
||||||
|
point, err := strconv.ParseUint(d, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
return a, err
|
||||||
|
}
|
||||||
|
a = append(a, rune(point))
|
||||||
|
}
|
||||||
|
return a, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadUnicodeData() {
|
||||||
|
f := gen.OpenUCDFile("UnicodeData.txt")
|
||||||
|
defer f.Close()
|
||||||
|
p := ucd.New(f)
|
||||||
|
for p.Next() {
|
||||||
|
r := p.Rune(ucd.CodePoint)
|
||||||
|
char := &chars[r]
|
||||||
|
|
||||||
|
char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass))
|
||||||
|
decmap := p.String(ucd.DecompMapping)
|
||||||
|
|
||||||
|
exp, err := parseDecomposition(decmap, false)
|
||||||
|
isCompat := false
|
||||||
|
if err != nil {
|
||||||
|
if len(decmap) > 0 {
|
||||||
|
exp, err = parseDecomposition(decmap, true)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err)
|
||||||
|
}
|
||||||
|
isCompat = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
char.name = p.String(ucd.Name)
|
||||||
|
char.codePoint = r
|
||||||
|
char.forms[FCompatibility].decomp = exp
|
||||||
|
if !isCompat {
|
||||||
|
char.forms[FCanonical].decomp = exp
|
||||||
|
} else {
|
||||||
|
char.compatDecomp = true
|
||||||
|
}
|
||||||
|
if len(decmap) > 0 {
|
||||||
|
char.forms[FCompatibility].decomp = exp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := p.Err(); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// compactCCC converts the sparse set of CCC values to a continguous one,
|
||||||
|
// reducing the number of bits needed from 8 to 6.
|
||||||
|
func compactCCC() {
|
||||||
|
m := make(map[uint8]uint8)
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
m[c.ccc] = 0
|
||||||
|
}
|
||||||
|
cccs := []int{}
|
||||||
|
for v, _ := range m {
|
||||||
|
cccs = append(cccs, int(v))
|
||||||
|
}
|
||||||
|
sort.Ints(cccs)
|
||||||
|
for i, c := range cccs {
|
||||||
|
cccMap[uint8(i)] = uint8(c)
|
||||||
|
m[uint8(c)] = uint8(i)
|
||||||
|
}
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
c.origCCC = c.ccc
|
||||||
|
c.ccc = m[c.ccc]
|
||||||
|
}
|
||||||
|
if len(m) >= 1<<6 {
|
||||||
|
log.Fatalf("too many difference CCC values: %d >= 64", len(m))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompositionExclusions.txt has form:
|
||||||
|
// 0958 # ...
|
||||||
|
// See https://unicode.org/reports/tr44/ for full explanation
|
||||||
|
func loadCompositionExclusions() {
|
||||||
|
f := gen.OpenUCDFile("CompositionExclusions.txt")
|
||||||
|
defer f.Close()
|
||||||
|
p := ucd.New(f)
|
||||||
|
for p.Next() {
|
||||||
|
c := &chars[p.Rune(0)]
|
||||||
|
if c.excludeInComp {
|
||||||
|
log.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint)
|
||||||
|
}
|
||||||
|
c.excludeInComp = true
|
||||||
|
}
|
||||||
|
if e := p.Err(); e != nil {
|
||||||
|
log.Fatal(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasCompatDecomp returns true if any of the recursive
|
||||||
|
// decompositions contains a compatibility expansion.
|
||||||
|
// In this case, the character may not occur in NFK*.
|
||||||
|
func hasCompatDecomp(r rune) bool {
|
||||||
|
c := &chars[r]
|
||||||
|
if c.compatDecomp {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for _, d := range c.forms[FCompatibility].decomp {
|
||||||
|
if hasCompatDecomp(d) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hangul related constants.
|
||||||
|
const (
|
||||||
|
HangulBase = 0xAC00
|
||||||
|
HangulEnd = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28)
|
||||||
|
|
||||||
|
JamoLBase = 0x1100
|
||||||
|
JamoLEnd = 0x1113
|
||||||
|
JamoVBase = 0x1161
|
||||||
|
JamoVEnd = 0x1176
|
||||||
|
JamoTBase = 0x11A8
|
||||||
|
JamoTEnd = 0x11C3
|
||||||
|
|
||||||
|
JamoLVTCount = 19 * 21 * 28
|
||||||
|
JamoTCount = 28
|
||||||
|
)
|
||||||
|
|
||||||
|
func isHangul(r rune) bool {
|
||||||
|
return HangulBase <= r && r < HangulEnd
|
||||||
|
}
|
||||||
|
|
||||||
|
func isHangulWithoutJamoT(r rune) bool {
|
||||||
|
if !isHangul(r) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
r -= HangulBase
|
||||||
|
return r < JamoLVTCount && r%JamoTCount == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func ccc(r rune) uint8 {
|
||||||
|
return chars[r].ccc
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert a rune in a buffer, ordered by Canonical Combining Class.
|
||||||
|
func insertOrdered(b Decomposition, r rune) Decomposition {
|
||||||
|
n := len(b)
|
||||||
|
b = append(b, 0)
|
||||||
|
cc := ccc(r)
|
||||||
|
if cc > 0 {
|
||||||
|
// Use bubble sort.
|
||||||
|
for ; n > 0; n-- {
|
||||||
|
if ccc(b[n-1]) <= cc {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
b[n] = b[n-1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
b[n] = r
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recursively decompose.
|
||||||
|
func decomposeRecursive(form int, r rune, d Decomposition) Decomposition {
|
||||||
|
dcomp := chars[r].forms[form].decomp
|
||||||
|
if len(dcomp) == 0 {
|
||||||
|
return insertOrdered(d, r)
|
||||||
|
}
|
||||||
|
for _, c := range dcomp {
|
||||||
|
d = decomposeRecursive(form, c, d)
|
||||||
|
}
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
func completeCharFields(form int) {
|
||||||
|
// Phase 0: pre-expand decomposition.
|
||||||
|
for i := range chars {
|
||||||
|
f := &chars[i].forms[form]
|
||||||
|
if len(f.decomp) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
exp := make(Decomposition, 0)
|
||||||
|
for _, c := range f.decomp {
|
||||||
|
exp = decomposeRecursive(form, c, exp)
|
||||||
|
}
|
||||||
|
f.expandedDecomp = exp
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 1: composition exclusion, mark decomposition.
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
f := &c.forms[form]
|
||||||
|
|
||||||
|
// Marks script-specific exclusions and version restricted.
|
||||||
|
f.isOneWay = c.excludeInComp
|
||||||
|
|
||||||
|
// Singletons
|
||||||
|
f.isOneWay = f.isOneWay || len(f.decomp) == 1
|
||||||
|
|
||||||
|
// Non-starter decompositions
|
||||||
|
if len(f.decomp) > 1 {
|
||||||
|
chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0
|
||||||
|
f.isOneWay = f.isOneWay || chk
|
||||||
|
}
|
||||||
|
|
||||||
|
// Runes that decompose into more than two runes.
|
||||||
|
f.isOneWay = f.isOneWay || len(f.decomp) > 2
|
||||||
|
|
||||||
|
if form == FCompatibility {
|
||||||
|
f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, r := range f.decomp {
|
||||||
|
chars[r].forms[form].inDecomp = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 2: forward and backward combining.
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
f := &c.forms[form]
|
||||||
|
|
||||||
|
if !f.isOneWay && len(f.decomp) == 2 {
|
||||||
|
f0 := &chars[f.decomp[0]].forms[form]
|
||||||
|
f1 := &chars[f.decomp[1]].forms[form]
|
||||||
|
if !f0.isOneWay {
|
||||||
|
f0.combinesForward = true
|
||||||
|
}
|
||||||
|
if !f1.isOneWay {
|
||||||
|
f1.combinesBackward = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isHangulWithoutJamoT(rune(i)) {
|
||||||
|
f.combinesForward = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 3: quick check values.
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
f := &c.forms[form]
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case len(f.decomp) > 0:
|
||||||
|
f.quickCheck[MDecomposed] = QCNo
|
||||||
|
case isHangul(rune(i)):
|
||||||
|
f.quickCheck[MDecomposed] = QCNo
|
||||||
|
default:
|
||||||
|
f.quickCheck[MDecomposed] = QCYes
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case f.isOneWay:
|
||||||
|
f.quickCheck[MComposed] = QCNo
|
||||||
|
case (i & 0xffff00) == JamoLBase:
|
||||||
|
f.quickCheck[MComposed] = QCYes
|
||||||
|
if JamoLBase <= i && i < JamoLEnd {
|
||||||
|
f.combinesForward = true
|
||||||
|
}
|
||||||
|
if JamoVBase <= i && i < JamoVEnd {
|
||||||
|
f.quickCheck[MComposed] = QCMaybe
|
||||||
|
f.combinesBackward = true
|
||||||
|
f.combinesForward = true
|
||||||
|
}
|
||||||
|
if JamoTBase <= i && i < JamoTEnd {
|
||||||
|
f.quickCheck[MComposed] = QCMaybe
|
||||||
|
f.combinesBackward = true
|
||||||
|
}
|
||||||
|
case !f.combinesBackward:
|
||||||
|
f.quickCheck[MComposed] = QCYes
|
||||||
|
default:
|
||||||
|
f.quickCheck[MComposed] = QCMaybe
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func computeNonStarterCounts() {
|
||||||
|
// Phase 4: leading and trailing non-starter count
|
||||||
|
for i := range chars {
|
||||||
|
c := &chars[i]
|
||||||
|
|
||||||
|
runes := []rune{rune(i)}
|
||||||
|
// We always use FCompatibility so that the CGJ insertion points do not
|
||||||
|
// change for repeated normalizations with different forms.
|
||||||
|
if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 {
|
||||||
|
runes = exp
|
||||||
|
}
|
||||||
|
// We consider runes that combine backwards to be non-starters for the
|
||||||
|
// purpose of Stream-Safe Text Processing.
|
||||||
|
for _, r := range runes {
|
||||||
|
if cr := &chars[r]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
c.nLeadingNonStarters++
|
||||||
|
}
|
||||||
|
for i := len(runes) - 1; i >= 0; i-- {
|
||||||
|
if cr := &chars[runes[i]]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
c.nTrailingNonStarters++
|
||||||
|
}
|
||||||
|
if c.nTrailingNonStarters > 3 {
|
||||||
|
log.Fatalf("%U: Decomposition with more than 3 (%d) trailing modifiers (%U)", i, c.nTrailingNonStarters, runes)
|
||||||
|
}
|
||||||
|
|
||||||
|
if isHangul(rune(i)) {
|
||||||
|
c.nTrailingNonStarters = 2
|
||||||
|
if isHangulWithoutJamoT(rune(i)) {
|
||||||
|
c.nTrailingNonStarters = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t {
|
||||||
|
log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t)
|
||||||
|
}
|
||||||
|
if t := c.nTrailingNonStarters; t > 3 {
|
||||||
|
log.Fatalf("%U: number of trailing non-starters is %d > 3", t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func printBytes(w io.Writer, b []byte, name string) {
|
||||||
|
fmt.Fprintf(w, "// %s: %d bytes\n", name, len(b))
|
||||||
|
fmt.Fprintf(w, "var %s = [...]byte {", name)
|
||||||
|
for i, c := range b {
|
||||||
|
switch {
|
||||||
|
case i%64 == 0:
|
||||||
|
fmt.Fprintf(w, "\n// Bytes %x - %x\n", i, i+63)
|
||||||
|
case i%8 == 0:
|
||||||
|
fmt.Fprintf(w, "\n")
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "0x%.2X, ", c)
|
||||||
|
}
|
||||||
|
fmt.Fprint(w, "\n}\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// See forminfo.go for format.
|
||||||
|
func makeEntry(f *FormInfo, c *Char) uint16 {
|
||||||
|
e := uint16(0)
|
||||||
|
if r := c.codePoint; HangulBase <= r && r < HangulEnd {
|
||||||
|
e |= 0x40
|
||||||
|
}
|
||||||
|
if f.combinesForward {
|
||||||
|
e |= 0x20
|
||||||
|
}
|
||||||
|
if f.quickCheck[MDecomposed] == QCNo {
|
||||||
|
e |= 0x4
|
||||||
|
}
|
||||||
|
switch f.quickCheck[MComposed] {
|
||||||
|
case QCYes:
|
||||||
|
case QCNo:
|
||||||
|
e |= 0x10
|
||||||
|
case QCMaybe:
|
||||||
|
e |= 0x18
|
||||||
|
default:
|
||||||
|
log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed])
|
||||||
|
}
|
||||||
|
e |= uint16(c.nTrailingNonStarters)
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// decompSet keeps track of unique decompositions, grouped by whether
|
||||||
|
// the decomposition is followed by a trailing and/or leading CCC.
|
||||||
|
type decompSet [7]map[string]bool
|
||||||
|
|
||||||
|
const (
|
||||||
|
normalDecomp = iota
|
||||||
|
firstMulti
|
||||||
|
firstCCC
|
||||||
|
endMulti
|
||||||
|
firstLeadingCCC
|
||||||
|
firstCCCZeroExcept
|
||||||
|
firstStarterWithNLead
|
||||||
|
lastDecomp
|
||||||
|
)
|
||||||
|
|
||||||
|
var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"}
|
||||||
|
|
||||||
|
func makeDecompSet() decompSet {
|
||||||
|
m := decompSet{}
|
||||||
|
for i := range m {
|
||||||
|
m[i] = make(map[string]bool)
|
||||||
|
}
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
func (m *decompSet) insert(key int, s string) {
|
||||||
|
m[key][s] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func printCharInfoTables(w io.Writer) int {
|
||||||
|
mkstr := func(r rune, f *FormInfo) (int, string) {
|
||||||
|
d := f.expandedDecomp
|
||||||
|
s := string([]rune(d))
|
||||||
|
if max := 1 << 6; len(s) >= max {
|
||||||
|
const msg = "%U: too many bytes in decomposition: %d >= %d"
|
||||||
|
log.Fatalf(msg, r, len(s), max)
|
||||||
|
}
|
||||||
|
head := uint8(len(s))
|
||||||
|
if f.quickCheck[MComposed] != QCYes {
|
||||||
|
head |= 0x40
|
||||||
|
}
|
||||||
|
if f.combinesForward {
|
||||||
|
head |= 0x80
|
||||||
|
}
|
||||||
|
s = string([]byte{head}) + s
|
||||||
|
|
||||||
|
lccc := ccc(d[0])
|
||||||
|
tccc := ccc(d[len(d)-1])
|
||||||
|
cc := ccc(r)
|
||||||
|
if cc != 0 && lccc == 0 && tccc == 0 {
|
||||||
|
log.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc)
|
||||||
|
}
|
||||||
|
if tccc < lccc && lccc != 0 {
|
||||||
|
const msg = "%U: lccc (%d) must be <= tcc (%d)"
|
||||||
|
log.Fatalf(msg, r, lccc, tccc)
|
||||||
|
}
|
||||||
|
index := normalDecomp
|
||||||
|
nTrail := chars[r].nTrailingNonStarters
|
||||||
|
nLead := chars[r].nLeadingNonStarters
|
||||||
|
if tccc > 0 || lccc > 0 || nTrail > 0 {
|
||||||
|
tccc <<= 2
|
||||||
|
tccc |= nTrail
|
||||||
|
s += string([]byte{tccc})
|
||||||
|
index = endMulti
|
||||||
|
for _, r := range d[1:] {
|
||||||
|
if ccc(r) == 0 {
|
||||||
|
index = firstCCC
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lccc > 0 || nLead > 0 {
|
||||||
|
s += string([]byte{lccc})
|
||||||
|
if index == firstCCC {
|
||||||
|
log.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r)
|
||||||
|
}
|
||||||
|
index = firstLeadingCCC
|
||||||
|
}
|
||||||
|
if cc != lccc {
|
||||||
|
if cc != 0 {
|
||||||
|
log.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc)
|
||||||
|
}
|
||||||
|
index = firstCCCZeroExcept
|
||||||
|
}
|
||||||
|
} else if len(d) > 1 {
|
||||||
|
index = firstMulti
|
||||||
|
}
|
||||||
|
return index, s
|
||||||
|
}
|
||||||
|
|
||||||
|
decompSet := makeDecompSet()
|
||||||
|
const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail.
|
||||||
|
decompSet.insert(firstStarterWithNLead, nLeadStr)
|
||||||
|
|
||||||
|
// Store the uniqued decompositions in a byte buffer,
|
||||||
|
// preceded by their byte length.
|
||||||
|
for _, c := range chars {
|
||||||
|
for _, f := range c.forms {
|
||||||
|
if len(f.expandedDecomp) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if f.combinesBackward {
|
||||||
|
log.Fatalf("%U: combinesBackward and decompose", c.codePoint)
|
||||||
|
}
|
||||||
|
index, s := mkstr(c.codePoint, &f)
|
||||||
|
decompSet.insert(index, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
decompositions := bytes.NewBuffer(make([]byte, 0, 10000))
|
||||||
|
size := 0
|
||||||
|
positionMap := make(map[string]uint16)
|
||||||
|
decompositions.WriteString("\000")
|
||||||
|
fmt.Fprintln(w, "const (")
|
||||||
|
for i, m := range decompSet {
|
||||||
|
sa := []string{}
|
||||||
|
for s := range m {
|
||||||
|
sa = append(sa, s)
|
||||||
|
}
|
||||||
|
sort.Strings(sa)
|
||||||
|
for _, s := range sa {
|
||||||
|
p := decompositions.Len()
|
||||||
|
decompositions.WriteString(s)
|
||||||
|
positionMap[s] = uint16(p)
|
||||||
|
}
|
||||||
|
if cname[i] != "" {
|
||||||
|
fmt.Fprintf(w, "%s = 0x%X\n", cname[i], decompositions.Len())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "maxDecomp = 0x8000")
|
||||||
|
fmt.Fprintln(w, ")")
|
||||||
|
b := decompositions.Bytes()
|
||||||
|
printBytes(w, b, "decomps")
|
||||||
|
size += len(b)
|
||||||
|
|
||||||
|
varnames := []string{"nfc", "nfkc"}
|
||||||
|
for i := 0; i < FNumberOfFormTypes; i++ {
|
||||||
|
trie := triegen.NewTrie(varnames[i])
|
||||||
|
|
||||||
|
for r, c := range chars {
|
||||||
|
f := c.forms[i]
|
||||||
|
d := f.expandedDecomp
|
||||||
|
if len(d) != 0 {
|
||||||
|
_, key := mkstr(c.codePoint, &f)
|
||||||
|
trie.Insert(rune(r), uint64(positionMap[key]))
|
||||||
|
if c.ccc != ccc(d[0]) {
|
||||||
|
// We assume the lead ccc of a decomposition !=0 in this case.
|
||||||
|
if ccc(d[0]) == 0 {
|
||||||
|
log.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward {
|
||||||
|
// Handle cases where it can't be detected that the nLead should be equal
|
||||||
|
// to nTrail.
|
||||||
|
trie.Insert(c.codePoint, uint64(positionMap[nLeadStr]))
|
||||||
|
} else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 {
|
||||||
|
trie.Insert(c.codePoint, uint64(0x8000|v))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sz, err := trie.Gen(w, triegen.Compact(&normCompacter{name: varnames[i]}))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
size += sz
|
||||||
|
}
|
||||||
|
return size
|
||||||
|
}
|
||||||
|
|
||||||
|
func contains(sa []string, s string) bool {
|
||||||
|
for _, a := range sa {
|
||||||
|
if a == s {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func makeTables() {
|
||||||
|
w := &bytes.Buffer{}
|
||||||
|
|
||||||
|
size := 0
|
||||||
|
if *tablelist == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
list := strings.Split(*tablelist, ",")
|
||||||
|
if *tablelist == "all" {
|
||||||
|
list = []string{"recomp", "info"}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute maximum decomposition size.
|
||||||
|
max := 0
|
||||||
|
for _, c := range chars {
|
||||||
|
if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max {
|
||||||
|
max = n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, `import "sync"`)
|
||||||
|
fmt.Fprintln(w)
|
||||||
|
|
||||||
|
fmt.Fprintln(w, "const (")
|
||||||
|
fmt.Fprintln(w, "\t// Version is the Unicode edition from which the tables are derived.")
|
||||||
|
fmt.Fprintf(w, "\tVersion = %q\n", gen.UnicodeVersion())
|
||||||
|
fmt.Fprintln(w)
|
||||||
|
fmt.Fprintln(w, "\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform")
|
||||||
|
fmt.Fprintln(w, "\t// may need to write atomically for any Form. Making a destination buffer at")
|
||||||
|
fmt.Fprintln(w, "\t// least this size ensures that Transform can always make progress and that")
|
||||||
|
fmt.Fprintln(w, "\t// the user does not need to grow the buffer on an ErrShortDst.")
|
||||||
|
fmt.Fprintf(w, "\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(0x034F))+max)
|
||||||
|
fmt.Fprintln(w, ")\n")
|
||||||
|
|
||||||
|
// Print the CCC remap table.
|
||||||
|
size += len(cccMap)
|
||||||
|
fmt.Fprintf(w, "var ccc = [%d]uint8{", len(cccMap))
|
||||||
|
for i := 0; i < len(cccMap); i++ {
|
||||||
|
if i%8 == 0 {
|
||||||
|
fmt.Fprintln(w)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "%3d, ", cccMap[uint8(i)])
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "\n}\n")
|
||||||
|
|
||||||
|
if contains(list, "info") {
|
||||||
|
size += printCharInfoTables(w)
|
||||||
|
}
|
||||||
|
|
||||||
|
if contains(list, "recomp") {
|
||||||
|
// Note that we use 32 bit keys, instead of 64 bit.
|
||||||
|
// This clips the bits of three entries, but we know
|
||||||
|
// this won't cause a collision. The compiler will catch
|
||||||
|
// any changes made to UnicodeData.txt that introduces
|
||||||
|
// a collision.
|
||||||
|
// Note that the recomposition map for NFC and NFKC
|
||||||
|
// are identical.
|
||||||
|
|
||||||
|
// Recomposition map
|
||||||
|
nrentries := 0
|
||||||
|
for _, c := range chars {
|
||||||
|
f := c.forms[FCanonical]
|
||||||
|
if !f.isOneWay && len(f.decomp) > 0 {
|
||||||
|
nrentries++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sz := nrentries * 8
|
||||||
|
size += sz
|
||||||
|
fmt.Fprintf(w, "// recompMap: %d bytes (entries only)\n", sz)
|
||||||
|
fmt.Fprintln(w, "var recompMap map[uint32]rune")
|
||||||
|
fmt.Fprintln(w, "var recompMapOnce sync.Once\n")
|
||||||
|
fmt.Fprintln(w, `const recompMapPacked = "" +`)
|
||||||
|
var buf [8]byte
|
||||||
|
for i, c := range chars {
|
||||||
|
f := c.forms[FCanonical]
|
||||||
|
d := f.decomp
|
||||||
|
if !f.isOneWay && len(d) > 0 {
|
||||||
|
key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1]))
|
||||||
|
binary.BigEndian.PutUint32(buf[:4], key)
|
||||||
|
binary.BigEndian.PutUint32(buf[4:], uint32(i))
|
||||||
|
fmt.Fprintf(w, "\t\t%q + // 0x%.8X: 0x%.8X\n", string(buf[:]), key, uint32(i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// hack so we don't have to special case the trailing plus sign
|
||||||
|
fmt.Fprintf(w, ` ""`)
|
||||||
|
fmt.Fprintln(w)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, "// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size)
|
||||||
|
gen.WriteVersionedGoFile("tables.go", "norm", w.Bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
func printChars() {
|
||||||
|
if *verbose {
|
||||||
|
for _, c := range chars {
|
||||||
|
if !c.isValid() || c.state == SMissing {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Println(c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// verifyComputed does various consistency tests.
|
||||||
|
func verifyComputed() {
|
||||||
|
for i, c := range chars {
|
||||||
|
for _, f := range c.forms {
|
||||||
|
isNo := (f.quickCheck[MDecomposed] == QCNo)
|
||||||
|
if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) {
|
||||||
|
log.Fatalf("%U: NF*D QC must be No if rune decomposes", i)
|
||||||
|
}
|
||||||
|
|
||||||
|
isMaybe := f.quickCheck[MComposed] == QCMaybe
|
||||||
|
if f.combinesBackward != isMaybe {
|
||||||
|
log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i)
|
||||||
|
}
|
||||||
|
if len(f.decomp) > 0 && f.combinesForward && isMaybe {
|
||||||
|
log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(f.expandedDecomp) != 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b {
|
||||||
|
// We accept these runes to be treated differently (it only affects
|
||||||
|
// segment breaking in iteration, most likely on improper use), but
|
||||||
|
// reconsider if more characters are added.
|
||||||
|
// U+FF9E HALFWIDTH KATAKANA VOICED SOUND MARK;Lm;0;L;<narrow> 3099;;;;N;;;;;
|
||||||
|
// U+FF9F HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK;Lm;0;L;<narrow> 309A;;;;N;;;;;
|
||||||
|
// U+3133 HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<compat> 11AA;;;;N;HANGUL LETTER GIYEOG SIOS;;;;
|
||||||
|
// U+318E HANGUL LETTER ARAEAE;Lo;0;L;<compat> 11A1;;;;N;HANGUL LETTER ALAE AE;;;;
|
||||||
|
// U+FFA3 HALFWIDTH HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<narrow> 3133;;;;N;HALFWIDTH HANGUL LETTER GIYEOG SIOS;;;;
|
||||||
|
// U+FFDC HALFWIDTH HANGUL LETTER I;Lo;0;L;<narrow> 3163;;;;N;;;;;
|
||||||
|
if i != 0xFF9E && i != 0xFF9F && !(0x3133 <= i && i <= 0x318E) && !(0xFFA3 <= i && i <= 0xFFDC) {
|
||||||
|
log.Fatalf("%U: nLead was %v; want %v", i, a, b)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
nfc := c.forms[FCanonical]
|
||||||
|
nfkc := c.forms[FCompatibility]
|
||||||
|
if nfc.combinesBackward != nfkc.combinesBackward {
|
||||||
|
log.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use values in DerivedNormalizationProps.txt to compare against the
|
||||||
|
// values we computed.
|
||||||
|
// DerivedNormalizationProps.txt has form:
|
||||||
|
// 00C0..00C5 ; NFD_QC; N # ...
|
||||||
|
// 0374 ; NFD_QC; N # ...
|
||||||
|
// See https://unicode.org/reports/tr44/ for full explanation
|
||||||
|
func testDerived() {
|
||||||
|
f := gen.OpenUCDFile("DerivedNormalizationProps.txt")
|
||||||
|
defer f.Close()
|
||||||
|
p := ucd.New(f)
|
||||||
|
for p.Next() {
|
||||||
|
r := p.Rune(0)
|
||||||
|
c := &chars[r]
|
||||||
|
|
||||||
|
var ftype, mode int
|
||||||
|
qt := p.String(1)
|
||||||
|
switch qt {
|
||||||
|
case "NFC_QC":
|
||||||
|
ftype, mode = FCanonical, MComposed
|
||||||
|
case "NFD_QC":
|
||||||
|
ftype, mode = FCanonical, MDecomposed
|
||||||
|
case "NFKC_QC":
|
||||||
|
ftype, mode = FCompatibility, MComposed
|
||||||
|
case "NFKD_QC":
|
||||||
|
ftype, mode = FCompatibility, MDecomposed
|
||||||
|
default:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var qr QCResult
|
||||||
|
switch p.String(2) {
|
||||||
|
case "Y":
|
||||||
|
qr = QCYes
|
||||||
|
case "N":
|
||||||
|
qr = QCNo
|
||||||
|
case "M":
|
||||||
|
qr = QCMaybe
|
||||||
|
default:
|
||||||
|
log.Fatalf(`Unexpected quick check value "%s"`, p.String(2))
|
||||||
|
}
|
||||||
|
if got := c.forms[ftype].quickCheck[mode]; got != qr {
|
||||||
|
log.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr)
|
||||||
|
}
|
||||||
|
c.forms[ftype].verified[mode] = true
|
||||||
|
}
|
||||||
|
if err := p.Err(); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
// Any unspecified value must be QCYes. Verify this.
|
||||||
|
for i, c := range chars {
|
||||||
|
for j, fd := range c.forms {
|
||||||
|
for k, qr := range fd.quickCheck {
|
||||||
|
if !fd.verified[k] && qr != QCYes {
|
||||||
|
m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n"
|
||||||
|
log.Printf(m, i, j, k, qr, c.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var testHeader = `const (
|
||||||
|
Yes = iota
|
||||||
|
No
|
||||||
|
Maybe
|
||||||
|
)
|
||||||
|
|
||||||
|
type formData struct {
|
||||||
|
qc uint8
|
||||||
|
combinesForward bool
|
||||||
|
decomposition string
|
||||||
|
}
|
||||||
|
|
||||||
|
type runeData struct {
|
||||||
|
r rune
|
||||||
|
ccc uint8
|
||||||
|
nLead uint8
|
||||||
|
nTrail uint8
|
||||||
|
f [2]formData // 0: canonical; 1: compatibility
|
||||||
|
}
|
||||||
|
|
||||||
|
func f(qc uint8, cf bool, dec string) [2]formData {
|
||||||
|
return [2]formData{{qc, cf, dec}, {qc, cf, dec}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData {
|
||||||
|
return [2]formData{{qc, cf, d}, {qck, cfk, dk}}
|
||||||
|
}
|
||||||
|
|
||||||
|
var testData = []runeData{
|
||||||
|
`
|
||||||
|
|
||||||
|
func printTestdata() {
|
||||||
|
type lastInfo struct {
|
||||||
|
ccc uint8
|
||||||
|
nLead uint8
|
||||||
|
nTrail uint8
|
||||||
|
f string
|
||||||
|
}
|
||||||
|
|
||||||
|
last := lastInfo{}
|
||||||
|
w := &bytes.Buffer{}
|
||||||
|
fmt.Fprintf(w, testHeader)
|
||||||
|
for r, c := range chars {
|
||||||
|
f := c.forms[FCanonical]
|
||||||
|
qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
|
||||||
|
f = c.forms[FCompatibility]
|
||||||
|
qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
|
||||||
|
s := ""
|
||||||
|
if d == dk && qc == qck && cf == cfk {
|
||||||
|
s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d)
|
||||||
|
} else {
|
||||||
|
s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk)
|
||||||
|
}
|
||||||
|
current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s}
|
||||||
|
if last != current {
|
||||||
|
fmt.Fprintf(w, "\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s)
|
||||||
|
last = current
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintln(w, "}")
|
||||||
|
gen.WriteVersionedGoFile("data_test.go", "norm", w.Bytes())
|
||||||
|
}
@@ -0,0 +1,117 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Trie table generator.
// Used by make*tables tools to generate a go file with trie data structures
// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
// sequence are used to lookup offsets in the index table to be used for the
// next byte. The last byte is used to index into a table with 16-bit values.

package main

import (
    "fmt"
    "io"
)

const maxSparseEntries = 16

type normCompacter struct {
    sparseBlocks [][]uint64
    sparseOffset []uint16
    sparseCount  int
    name         string
}

func mostFrequentStride(a []uint64) int {
    counts := make(map[int]int)
    var v int
    for _, x := range a {
        if stride := int(x) - v; v != 0 && stride >= 0 {
            counts[stride]++
        }
        v = int(x)
    }
    var maxs, maxc int
    for stride, cnt := range counts {
        if cnt > maxc || (cnt == maxc && stride < maxs) {
            maxs, maxc = stride, cnt
        }
    }
    return maxs
}

func countSparseEntries(a []uint64) int {
    stride := mostFrequentStride(a)
    var v, count int
    for _, tv := range a {
        if int(tv)-v != stride {
            if tv != 0 {
                count++
            }
        }
        v = int(tv)
    }
    return count
}

func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
    if n := countSparseEntries(v); n <= maxSparseEntries {
        return (n+1)*4 + 2, true
    }
    return 0, false
}

func (c *normCompacter) Store(v []uint64) uint32 {
    h := uint32(len(c.sparseOffset))
    c.sparseBlocks = append(c.sparseBlocks, v)
    c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
    c.sparseCount += countSparseEntries(v) + 1
    return h
}

func (c *normCompacter) Handler() string {
    return c.name + "Sparse.lookup"
}

func (c *normCompacter) Print(w io.Writer) (retErr error) {
    p := func(f string, x ...interface{}) {
        if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
            retErr = err
        }
    }

    ls := len(c.sparseBlocks)
    p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
    p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)

    ns := c.sparseCount
    p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
    p("var %sSparseValues = [%d]valueRange {", c.name, ns)
    for i, b := range c.sparseBlocks {
        p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
        var v int
        stride := mostFrequentStride(b)
        n := countSparseEntries(b)
        p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
        for i, nv := range b {
            if int(nv)-v != stride {
                if v != 0 {
                    p(",hi:%#02x},", 0x80+i-1)
                }
                if nv != 0 {
                    p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
                }
            }
            v = int(nv)
        }
        if v != 0 {
            p(",hi:%#02x},", 0x80+len(b)-1)
        }
    }
    p("\n}\n\n")
    return
}
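A minimal sketch of how the sparse tables emitted by normCompacter.Print could be consulted, based on the layout it writes: per block, a header valueRange whose value field holds the stride and whose lo field holds the entry count, followed by that many {value, lo, hi} ranges over trailing bytes 0x80..0xBF. The type and method names here are illustrative assumptions, not the vendored trie API.

package main

import "fmt"

type valueRange struct {
    value  uint16 // header: stride; entry: value at the start of the range
    lo, hi uint8  // header: entry count in lo; entry: trailing-byte range
}

type sparseBlocks struct {
    values []valueRange
    offset []uint16
}

// lookup returns the 16-bit value for trailing byte b in sparse block n.
func (t *sparseBlocks) lookup(n uint32, b byte) uint16 {
    offset := t.offset[n]
    header := t.values[offset]
    lo := offset + 1
    hi := lo + uint16(header.lo)
    for lo < hi {
        m := lo + (hi-lo)/2
        r := t.values[m]
        if r.lo <= b && b <= r.hi {
            return r.value + uint16(b-r.lo)*header.value
        }
        if b < r.lo {
            hi = m
        } else {
            lo = m + 1
        }
    }
    return 0
}

func main() {
    // One block: stride 1, ranges 0x80-0x82 -> 10,11,12 and 0x90-0x90 -> 40.
    t := sparseBlocks{
        values: []valueRange{
            {value: 0x0001, lo: 0x02}, // header: stride=1, 2 entries
            {value: 10, lo: 0x80, hi: 0x82},
            {value: 40, lo: 0x90, hi: 0x90},
        },
        offset: []uint16{0},
    }
    fmt.Println(t.lookup(0, 0x81)) // 11
    fmt.Println(t.lookup(0, 0x85)) // 0 (not covered)
}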
@ -1,45 +1,45 @@
|
||||||
# cloud.google.com/go v0.45.1
|
# cloud.google.com/go v0.45.1
|
||||||
cloud.google.com/go/compute/metadata
|
cloud.google.com/go/storage
|
||||||
cloud.google.com/go/iam
|
cloud.google.com/go/iam
|
||||||
cloud.google.com/go/internal
|
cloud.google.com/go/internal
|
||||||
cloud.google.com/go/internal/optional
|
cloud.google.com/go/internal/optional
|
||||||
cloud.google.com/go/internal/trace
|
cloud.google.com/go/internal/trace
|
||||||
cloud.google.com/go/internal/version
|
cloud.google.com/go/internal/version
|
||||||
cloud.google.com/go/storage
|
cloud.google.com/go/compute/metadata
|
||||||
# github.com/Azure/azure-sdk-for-go v21.3.0+incompatible
|
# github.com/Azure/azure-sdk-for-go v21.3.0+incompatible
|
||||||
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources
|
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources
|
||||||
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/storage/mgmt/storage
|
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/storage/mgmt/storage
|
||||||
|
github.com/Azure/azure-sdk-for-go/storage
|
||||||
github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources
|
github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources
|
||||||
github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage
|
github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage
|
||||||
github.com/Azure/azure-sdk-for-go/storage
|
|
||||||
github.com/Azure/azure-sdk-for-go/version
|
github.com/Azure/azure-sdk-for-go/version
|
||||||
# github.com/Azure/go-autorest v10.15.4+incompatible
|
# github.com/Azure/go-autorest v10.15.4+incompatible
|
||||||
github.com/Azure/go-autorest/autorest
|
github.com/Azure/go-autorest/autorest
|
||||||
github.com/Azure/go-autorest/autorest/adal
|
github.com/Azure/go-autorest/autorest/adal
|
||||||
github.com/Azure/go-autorest/autorest/azure
|
github.com/Azure/go-autorest/autorest/azure
|
||||||
github.com/Azure/go-autorest/autorest/azure/cli
|
|
||||||
github.com/Azure/go-autorest/autorest/date
|
|
||||||
github.com/Azure/go-autorest/autorest/to
|
|
||||||
github.com/Azure/go-autorest/autorest/validation
|
|
||||||
github.com/Azure/go-autorest/logger
|
github.com/Azure/go-autorest/logger
|
||||||
github.com/Azure/go-autorest/version
|
github.com/Azure/go-autorest/version
|
||||||
|
github.com/Azure/go-autorest/autorest/date
|
||||||
|
github.com/Azure/go-autorest/autorest/azure/cli
|
||||||
|
github.com/Azure/go-autorest/autorest/to
|
||||||
|
github.com/Azure/go-autorest/autorest/validation
|
||||||
# github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4
|
# github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4
|
||||||
github.com/Azure/go-ntlmssp
|
github.com/Azure/go-ntlmssp
|
||||||
# github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022
|
# github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022
|
||||||
github.com/ChrisTrenkamp/goxpath
|
github.com/ChrisTrenkamp/goxpath
|
||||||
|
github.com/ChrisTrenkamp/goxpath/tree
|
||||||
|
github.com/ChrisTrenkamp/goxpath/tree/xmltree
|
||||||
github.com/ChrisTrenkamp/goxpath/internal/execxp
|
github.com/ChrisTrenkamp/goxpath/internal/execxp
|
||||||
|
github.com/ChrisTrenkamp/goxpath/parser
|
||||||
|
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlbuilder
|
||||||
|
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlele
|
||||||
github.com/ChrisTrenkamp/goxpath/internal/execxp/findutil
|
github.com/ChrisTrenkamp/goxpath/internal/execxp/findutil
|
||||||
github.com/ChrisTrenkamp/goxpath/internal/execxp/intfns
|
github.com/ChrisTrenkamp/goxpath/internal/execxp/intfns
|
||||||
github.com/ChrisTrenkamp/goxpath/internal/xsort
|
github.com/ChrisTrenkamp/goxpath/internal/xsort
|
||||||
github.com/ChrisTrenkamp/goxpath/lexer
|
github.com/ChrisTrenkamp/goxpath/lexer
|
||||||
github.com/ChrisTrenkamp/goxpath/parser
|
|
||||||
github.com/ChrisTrenkamp/goxpath/parser/pathexpr
|
github.com/ChrisTrenkamp/goxpath/parser/pathexpr
|
||||||
github.com/ChrisTrenkamp/goxpath/tree
|
|
||||||
github.com/ChrisTrenkamp/goxpath/tree/xmltree
|
|
||||||
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlbuilder
|
|
||||||
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlele
|
|
||||||
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlnode
|
|
||||||
github.com/ChrisTrenkamp/goxpath/xconst
|
github.com/ChrisTrenkamp/goxpath/xconst
|
||||||
|
github.com/ChrisTrenkamp/goxpath/tree/xmltree/xmlnode
|
||||||
# github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292
|
# github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292
|
||||||
github.com/Unknwon/com
|
github.com/Unknwon/com
|
||||||
# github.com/agext/levenshtein v1.2.2
|
# github.com/agext/levenshtein v1.2.2
|
||||||
|
@ -49,17 +49,17 @@ github.com/agl/ed25519
|
||||||
github.com/agl/ed25519/edwards25519
|
github.com/agl/ed25519/edwards25519
|
||||||
# github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a
|
# github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/provider
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils
|
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/services/location
|
github.com/aliyun/alibaba-cloud-sdk-go/services/location
|
||||||
github.com/aliyun/alibaba-cloud-sdk-go/services/sts
|
github.com/aliyun/alibaba-cloud-sdk-go/services/sts
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/provider
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers
|
||||||
# github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190103054945-8205d1f41e70
|
# github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190103054945-8205d1f41e70
|
||||||
github.com/aliyun/aliyun-oss-go-sdk/oss
|
github.com/aliyun/aliyun-oss-go-sdk/oss
|
||||||
# github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible
|
# github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible
|
||||||
|
@ -82,47 +82,47 @@ github.com/armon/circbuf
|
||||||
github.com/armon/go-radix
|
github.com/armon/go-radix
|
||||||
# github.com/aws/aws-sdk-go v1.25.3
|
# github.com/aws/aws-sdk-go v1.25.3
|
||||||
github.com/aws/aws-sdk-go/aws
|
github.com/aws/aws-sdk-go/aws
|
||||||
github.com/aws/aws-sdk-go/aws/arn
|
|
||||||
github.com/aws/aws-sdk-go/aws/awserr
|
github.com/aws/aws-sdk-go/aws/awserr
|
||||||
|
github.com/aws/aws-sdk-go/service/dynamodb
|
||||||
|
github.com/aws/aws-sdk-go/service/s3
|
||||||
|
github.com/aws/aws-sdk-go/aws/credentials
|
||||||
|
github.com/aws/aws-sdk-go/aws/endpoints
|
||||||
|
github.com/aws/aws-sdk-go/internal/sdkio
|
||||||
github.com/aws/aws-sdk-go/aws/awsutil
|
github.com/aws/aws-sdk-go/aws/awsutil
|
||||||
github.com/aws/aws-sdk-go/aws/client
|
github.com/aws/aws-sdk-go/aws/client
|
||||||
github.com/aws/aws-sdk-go/aws/client/metadata
|
github.com/aws/aws-sdk-go/aws/client/metadata
|
||||||
github.com/aws/aws-sdk-go/aws/corehandlers
|
|
||||||
github.com/aws/aws-sdk-go/aws/credentials
|
|
||||||
github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds
|
|
||||||
github.com/aws/aws-sdk-go/aws/credentials/endpointcreds
|
|
||||||
github.com/aws/aws-sdk-go/aws/credentials/processcreds
|
|
||||||
github.com/aws/aws-sdk-go/aws/credentials/stscreds
|
|
||||||
github.com/aws/aws-sdk-go/aws/crr
|
github.com/aws/aws-sdk-go/aws/crr
|
||||||
github.com/aws/aws-sdk-go/aws/csm
|
|
||||||
github.com/aws/aws-sdk-go/aws/defaults
|
|
||||||
github.com/aws/aws-sdk-go/aws/ec2metadata
|
|
||||||
github.com/aws/aws-sdk-go/aws/endpoints
|
|
||||||
github.com/aws/aws-sdk-go/aws/request
|
github.com/aws/aws-sdk-go/aws/request
|
||||||
github.com/aws/aws-sdk-go/aws/session
|
|
||||||
github.com/aws/aws-sdk-go/aws/signer/v4
|
github.com/aws/aws-sdk-go/aws/signer/v4
|
||||||
github.com/aws/aws-sdk-go/internal/ini
|
|
||||||
github.com/aws/aws-sdk-go/internal/s3err
|
|
||||||
github.com/aws/aws-sdk-go/internal/sdkio
|
|
||||||
github.com/aws/aws-sdk-go/internal/sdkmath
|
|
||||||
github.com/aws/aws-sdk-go/internal/sdkrand
|
|
||||||
github.com/aws/aws-sdk-go/internal/sdkuri
|
|
||||||
github.com/aws/aws-sdk-go/internal/shareddefaults
|
|
||||||
github.com/aws/aws-sdk-go/private/protocol
|
github.com/aws/aws-sdk-go/private/protocol
|
||||||
|
github.com/aws/aws-sdk-go/private/protocol/jsonrpc
|
||||||
|
github.com/aws/aws-sdk-go/internal/s3err
|
||||||
github.com/aws/aws-sdk-go/private/protocol/eventstream
|
github.com/aws/aws-sdk-go/private/protocol/eventstream
|
||||||
github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi
|
github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi
|
||||||
github.com/aws/aws-sdk-go/private/protocol/json/jsonutil
|
|
||||||
github.com/aws/aws-sdk-go/private/protocol/jsonrpc
|
|
||||||
github.com/aws/aws-sdk-go/private/protocol/query
|
|
||||||
github.com/aws/aws-sdk-go/private/protocol/query/queryutil
|
|
||||||
github.com/aws/aws-sdk-go/private/protocol/rest
|
github.com/aws/aws-sdk-go/private/protocol/rest
|
||||||
github.com/aws/aws-sdk-go/private/protocol/restxml
|
github.com/aws/aws-sdk-go/private/protocol/restxml
|
||||||
github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil
|
github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil
|
||||||
github.com/aws/aws-sdk-go/service/dynamodb
|
github.com/aws/aws-sdk-go/aws/arn
|
||||||
|
github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds
|
||||||
|
github.com/aws/aws-sdk-go/aws/credentials/stscreds
|
||||||
|
github.com/aws/aws-sdk-go/aws/defaults
|
||||||
|
github.com/aws/aws-sdk-go/aws/ec2metadata
|
||||||
|
github.com/aws/aws-sdk-go/aws/session
|
||||||
github.com/aws/aws-sdk-go/service/iam
|
github.com/aws/aws-sdk-go/service/iam
|
||||||
github.com/aws/aws-sdk-go/service/s3
|
|
||||||
github.com/aws/aws-sdk-go/service/sts
|
github.com/aws/aws-sdk-go/service/sts
|
||||||
|
github.com/aws/aws-sdk-go/internal/ini
|
||||||
|
github.com/aws/aws-sdk-go/internal/shareddefaults
|
||||||
|
github.com/aws/aws-sdk-go/internal/sdkrand
|
||||||
|
github.com/aws/aws-sdk-go/internal/sdkmath
|
||||||
|
github.com/aws/aws-sdk-go/private/protocol/json/jsonutil
|
||||||
|
github.com/aws/aws-sdk-go/private/protocol/query
|
||||||
|
github.com/aws/aws-sdk-go/internal/sdkuri
|
||||||
github.com/aws/aws-sdk-go/service/sts/stsiface
|
github.com/aws/aws-sdk-go/service/sts/stsiface
|
||||||
|
github.com/aws/aws-sdk-go/aws/corehandlers
|
||||||
|
github.com/aws/aws-sdk-go/aws/credentials/endpointcreds
|
||||||
|
github.com/aws/aws-sdk-go/aws/credentials/processcreds
|
||||||
|
github.com/aws/aws-sdk-go/aws/csm
|
||||||
|
github.com/aws/aws-sdk-go/private/protocol/query/queryutil
|
||||||
# github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d
|
# github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d
|
||||||
github.com/bgentry/go-netrc/netrc
|
github.com/bgentry/go-netrc/netrc
|
||||||
# github.com/bgentry/speakeasy v0.1.0
|
# github.com/bgentry/speakeasy v0.1.0
|
||||||
|
@ -134,19 +134,19 @@ github.com/bmatcuk/doublestar
|
||||||
# github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e
|
# github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e
|
||||||
github.com/chzyer/readline
|
github.com/chzyer/readline
|
||||||
# github.com/coreos/etcd v3.3.10+incompatible
|
# github.com/coreos/etcd v3.3.10+incompatible
|
||||||
github.com/coreos/etcd/auth/authpb
|
|
||||||
github.com/coreos/etcd/client
|
github.com/coreos/etcd/client
|
||||||
github.com/coreos/etcd/clientv3
|
github.com/coreos/etcd/clientv3
|
||||||
github.com/coreos/etcd/clientv3/concurrency
|
github.com/coreos/etcd/clientv3/concurrency
|
||||||
|
github.com/coreos/etcd/pkg/transport
|
||||||
|
github.com/coreos/etcd/pkg/pathutil
|
||||||
|
github.com/coreos/etcd/pkg/srv
|
||||||
|
github.com/coreos/etcd/pkg/types
|
||||||
|
github.com/coreos/etcd/version
|
||||||
|
github.com/coreos/etcd/auth/authpb
|
||||||
github.com/coreos/etcd/etcdserver/api/v3rpc/rpctypes
|
github.com/coreos/etcd/etcdserver/api/v3rpc/rpctypes
|
||||||
github.com/coreos/etcd/etcdserver/etcdserverpb
|
github.com/coreos/etcd/etcdserver/etcdserverpb
|
||||||
github.com/coreos/etcd/mvcc/mvccpb
|
github.com/coreos/etcd/mvcc/mvccpb
|
||||||
github.com/coreos/etcd/pkg/pathutil
|
|
||||||
github.com/coreos/etcd/pkg/srv
|
|
||||||
github.com/coreos/etcd/pkg/tlsutil
|
github.com/coreos/etcd/pkg/tlsutil
|
||||||
github.com/coreos/etcd/pkg/transport
|
|
||||||
github.com/coreos/etcd/pkg/types
|
|
||||||
github.com/coreos/etcd/version
|
|
||||||
# github.com/coreos/go-semver v0.2.0
|
# github.com/coreos/go-semver v0.2.0
|
||||||
github.com/coreos/go-semver/semver
|
github.com/coreos/go-semver/semver
|
||||||
# github.com/davecgh/go-spew v1.1.1
|
# github.com/davecgh/go-spew v1.1.1
|
||||||
|
@@ -171,14 +171,14 @@ github.com/gogo/protobuf/protoc-gen-gogo/descriptor
 github.com/golang/mock/gomock
 # github.com/golang/protobuf v1.3.2
 github.com/golang/protobuf/proto
-github.com/golang/protobuf/protoc-gen-go/descriptor
 github.com/golang/protobuf/ptypes
 github.com/golang/protobuf/ptypes/any
 github.com/golang/protobuf/ptypes/duration
 github.com/golang/protobuf/ptypes/timestamp
+github.com/golang/protobuf/protoc-gen-go/descriptor
 # github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db
 github.com/golang/snappy
-# github.com/google/go-cmp v0.3.0
+# github.com/google/go-cmp v0.3.1
 github.com/google/go-cmp/cmp
 github.com/google/go-cmp/cmp/cmpopts
 github.com/google/go-cmp/cmp/internal/diff
@@ -193,8 +193,14 @@ github.com/google/uuid
 github.com/googleapis/gax-go/v2
 # github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968
 github.com/gophercloud/gophercloud
-github.com/gophercloud/gophercloud/internal
 github.com/gophercloud/gophercloud/openstack
+github.com/gophercloud/gophercloud/openstack/objectstorage/v1/containers
+github.com/gophercloud/gophercloud/openstack/objectstorage/v1/objects
+github.com/gophercloud/gophercloud/pagination
+github.com/gophercloud/gophercloud/openstack/identity/v2/tokens
+github.com/gophercloud/gophercloud/openstack/identity/v3/tokens
+github.com/gophercloud/gophercloud/openstack/utils
+github.com/gophercloud/gophercloud/openstack/objectstorage/v1/accounts
 github.com/gophercloud/gophercloud/openstack/blockstorage/extensions/volumeactions
 github.com/gophercloud/gophercloud/openstack/blockstorage/v1/volumes
 github.com/gophercloud/gophercloud/openstack/blockstorage/v2/snapshots
@ -219,19 +225,15 @@ github.com/gophercloud/gophercloud/openstack/containerinfra/v1/clusters
|
||||||
github.com/gophercloud/gophercloud/openstack/containerinfra/v1/clustertemplates
|
github.com/gophercloud/gophercloud/openstack/containerinfra/v1/clustertemplates
|
||||||
github.com/gophercloud/gophercloud/openstack/db/v1/configurations
|
github.com/gophercloud/gophercloud/openstack/db/v1/configurations
|
||||||
github.com/gophercloud/gophercloud/openstack/db/v1/databases
|
github.com/gophercloud/gophercloud/openstack/db/v1/databases
|
||||||
github.com/gophercloud/gophercloud/openstack/db/v1/datastores
|
|
||||||
github.com/gophercloud/gophercloud/openstack/db/v1/instances
|
github.com/gophercloud/gophercloud/openstack/db/v1/instances
|
||||||
github.com/gophercloud/gophercloud/openstack/db/v1/users
|
github.com/gophercloud/gophercloud/openstack/db/v1/users
|
||||||
github.com/gophercloud/gophercloud/openstack/dns/v2/recordsets
|
github.com/gophercloud/gophercloud/openstack/dns/v2/recordsets
|
||||||
github.com/gophercloud/gophercloud/openstack/dns/v2/zones
|
github.com/gophercloud/gophercloud/openstack/dns/v2/zones
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v2/tenants
|
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v2/tokens
|
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/endpoints
|
github.com/gophercloud/gophercloud/openstack/identity/v3/endpoints
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/groups
|
github.com/gophercloud/gophercloud/openstack/identity/v3/groups
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/projects
|
github.com/gophercloud/gophercloud/openstack/identity/v3/projects
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/roles
|
github.com/gophercloud/gophercloud/openstack/identity/v3/roles
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/services
|
github.com/gophercloud/gophercloud/openstack/identity/v3/services
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/tokens
|
|
||||||
github.com/gophercloud/gophercloud/openstack/identity/v3/users
|
github.com/gophercloud/gophercloud/openstack/identity/v3/users
|
||||||
github.com/gophercloud/gophercloud/openstack/imageservice/v2/imagedata
|
github.com/gophercloud/gophercloud/openstack/imageservice/v2/imagedata
|
||||||
github.com/gophercloud/gophercloud/openstack/imageservice/v2/images
|
github.com/gophercloud/gophercloud/openstack/imageservice/v2/images
|
||||||
|
@ -268,9 +270,6 @@ github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/vpnaas/sit
|
||||||
github.com/gophercloud/gophercloud/openstack/networking/v2/networks
|
github.com/gophercloud/gophercloud/openstack/networking/v2/networks
|
||||||
github.com/gophercloud/gophercloud/openstack/networking/v2/ports
|
github.com/gophercloud/gophercloud/openstack/networking/v2/ports
|
||||||
github.com/gophercloud/gophercloud/openstack/networking/v2/subnets
|
github.com/gophercloud/gophercloud/openstack/networking/v2/subnets
|
||||||
github.com/gophercloud/gophercloud/openstack/objectstorage/v1/accounts
|
|
||||||
github.com/gophercloud/gophercloud/openstack/objectstorage/v1/containers
|
|
||||||
github.com/gophercloud/gophercloud/openstack/objectstorage/v1/objects
|
|
||||||
github.com/gophercloud/gophercloud/openstack/objectstorage/v1/swauth
|
github.com/gophercloud/gophercloud/openstack/objectstorage/v1/swauth
|
||||||
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/errors
|
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/errors
|
||||||
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/messages
|
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/messages
|
||||||
|
@ -278,16 +277,17 @@ github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/securityservic
|
||||||
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/sharenetworks
|
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/sharenetworks
|
||||||
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/shares
|
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/shares
|
||||||
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/snapshots
|
github.com/gophercloud/gophercloud/openstack/sharedfilesystems/v2/snapshots
|
||||||
github.com/gophercloud/gophercloud/openstack/utils
|
github.com/gophercloud/gophercloud/openstack/identity/v2/tenants
|
||||||
github.com/gophercloud/gophercloud/pagination
|
github.com/gophercloud/gophercloud/openstack/db/v1/datastores
|
||||||
|
github.com/gophercloud/gophercloud/internal
|
||||||
# github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01
|
# github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01
|
||||||
github.com/gophercloud/utils/openstack/clientconfig
|
github.com/gophercloud/utils/openstack/clientconfig
|
||||||
# github.com/hashicorp/aws-sdk-go-base v0.4.0
|
# github.com/hashicorp/aws-sdk-go-base v0.4.0
|
||||||
github.com/hashicorp/aws-sdk-go-base
|
github.com/hashicorp/aws-sdk-go-base
|
||||||
# github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089
|
# github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089
|
||||||
github.com/hashicorp/consul/api
|
github.com/hashicorp/consul/api
|
||||||
github.com/hashicorp/consul/lib/freeport
|
|
||||||
github.com/hashicorp/consul/testutil
|
github.com/hashicorp/consul/testutil
|
||||||
|
github.com/hashicorp/consul/lib/freeport
|
||||||
github.com/hashicorp/consul/testutil/retry
|
github.com/hashicorp/consul/testutil/retry
|
||||||
# github.com/hashicorp/errwrap v1.0.0
|
# github.com/hashicorp/errwrap v1.0.0
|
||||||
github.com/hashicorp/errwrap
|
github.com/hashicorp/errwrap
|
||||||
|
@@ -296,7 +296,7 @@ github.com/hashicorp/go-azure-helpers/authentication
 github.com/hashicorp/go-azure-helpers/storage
 # github.com/hashicorp/go-checkpoint v0.5.0
 github.com/hashicorp/go-checkpoint
-# github.com/hashicorp/go-cleanhttp v0.5.0
+# github.com/hashicorp/go-cleanhttp v0.5.1
 github.com/hashicorp/go-cleanhttp
 # github.com/hashicorp/go-getter v1.4.0
 github.com/hashicorp/go-getter
@@ -320,7 +320,7 @@ github.com/hashicorp/go-slug
 github.com/hashicorp/go-tfe
 # github.com/hashicorp/go-uuid v1.0.1
 github.com/hashicorp/go-uuid
-# github.com/hashicorp/go-version v1.1.0
+# github.com/hashicorp/go-version v1.2.0
 github.com/hashicorp/go-version
 # github.com/hashicorp/golang-lru v0.5.1
 github.com/hashicorp/golang-lru/simplelru
@ -329,31 +329,31 @@ github.com/hashicorp/hcl
|
||||||
github.com/hashicorp/hcl/hcl/ast
|
github.com/hashicorp/hcl/hcl/ast
|
||||||
github.com/hashicorp/hcl/hcl/parser
|
github.com/hashicorp/hcl/hcl/parser
|
||||||
github.com/hashicorp/hcl/hcl/printer
|
github.com/hashicorp/hcl/hcl/printer
|
||||||
github.com/hashicorp/hcl/hcl/scanner
|
|
||||||
github.com/hashicorp/hcl/hcl/strconv
|
|
||||||
github.com/hashicorp/hcl/hcl/token
|
github.com/hashicorp/hcl/hcl/token
|
||||||
github.com/hashicorp/hcl/json/parser
|
github.com/hashicorp/hcl/json/parser
|
||||||
|
github.com/hashicorp/hcl/hcl/scanner
|
||||||
|
github.com/hashicorp/hcl/hcl/strconv
|
||||||
github.com/hashicorp/hcl/json/scanner
|
github.com/hashicorp/hcl/json/scanner
|
||||||
github.com/hashicorp/hcl/json/token
|
github.com/hashicorp/hcl/json/token
|
||||||
# github.com/hashicorp/hcl/v2 v2.0.0
|
# github.com/hashicorp/hcl/v2 v2.0.0
|
||||||
github.com/hashicorp/hcl/v2
|
github.com/hashicorp/hcl/v2
|
||||||
github.com/hashicorp/hcl/v2/ext/dynblock
|
|
||||||
github.com/hashicorp/hcl/v2/ext/typeexpr
|
|
||||||
github.com/hashicorp/hcl/v2/gohcl
|
|
||||||
github.com/hashicorp/hcl/v2/hcldec
|
|
||||||
github.com/hashicorp/hcl/v2/hcled
|
|
||||||
github.com/hashicorp/hcl/v2/hclparse
|
|
||||||
github.com/hashicorp/hcl/v2/hclsyntax
|
github.com/hashicorp/hcl/v2/hclsyntax
|
||||||
github.com/hashicorp/hcl/v2/hcltest
|
github.com/hashicorp/hcl/v2/hcldec
|
||||||
github.com/hashicorp/hcl/v2/hclwrite
|
github.com/hashicorp/hcl/v2/hclwrite
|
||||||
github.com/hashicorp/hcl/v2/json
|
github.com/hashicorp/hcl/v2/json
|
||||||
|
github.com/hashicorp/hcl/v2/hcled
|
||||||
|
github.com/hashicorp/hcl/v2/hclparse
|
||||||
|
github.com/hashicorp/hcl/v2/gohcl
|
||||||
|
github.com/hashicorp/hcl/v2/ext/typeexpr
|
||||||
|
github.com/hashicorp/hcl/v2/ext/dynblock
|
||||||
|
github.com/hashicorp/hcl/v2/hcltest
|
||||||
# github.com/hashicorp/hcl2 v0.0.0-20190821123243-0c888d1241f6
|
# github.com/hashicorp/hcl2 v0.0.0-20190821123243-0c888d1241f6
|
||||||
github.com/hashicorp/hcl2/gohcl
|
github.com/hashicorp/hcl2/gohcl
|
||||||
github.com/hashicorp/hcl2/hcl
|
github.com/hashicorp/hcl2/hcl
|
||||||
github.com/hashicorp/hcl2/hcl/hclsyntax
|
github.com/hashicorp/hcl2/hcl/hclsyntax
|
||||||
github.com/hashicorp/hcl2/hcl/json
|
|
||||||
github.com/hashicorp/hcl2/hclparse
|
github.com/hashicorp/hcl2/hclparse
|
||||||
github.com/hashicorp/hcl2/hclwrite
|
github.com/hashicorp/hcl2/hclwrite
|
||||||
|
github.com/hashicorp/hcl2/hcl/json
|
||||||
# github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
|
# github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
|
||||||
github.com/hashicorp/hil
|
github.com/hashicorp/hil
|
||||||
github.com/hashicorp/hil/ast
|
github.com/hashicorp/hil/ast
|
||||||
|
@@ -365,10 +365,14 @@ github.com/hashicorp/logutils
 github.com/hashicorp/serf/coordinate
 # github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4
 github.com/hashicorp/terraform-config-inspect/tfconfig
+# github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596
+github.com/hashicorp/terraform-svchost
+github.com/hashicorp/terraform-svchost/auth
+github.com/hashicorp/terraform-svchost/disco
 # github.com/hashicorp/vault v0.10.4
-github.com/hashicorp/vault/helper/compressutil
-github.com/hashicorp/vault/helper/jsonutil
 github.com/hashicorp/vault/helper/pgpkeys
+github.com/hashicorp/vault/helper/jsonutil
+github.com/hashicorp/vault/helper/compressutil
 # github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb
 github.com/hashicorp/yamux
 # github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af
@ -376,23 +380,23 @@ github.com/jmespath/go-jmespath
|
||||||
# github.com/joyent/triton-go v0.0.0-20180313100802-d8f9c0314926
|
# github.com/joyent/triton-go v0.0.0-20180313100802-d8f9c0314926
|
||||||
github.com/joyent/triton-go
|
github.com/joyent/triton-go
|
||||||
github.com/joyent/triton-go/authentication
|
github.com/joyent/triton-go/authentication
|
||||||
github.com/joyent/triton-go/client
|
|
||||||
github.com/joyent/triton-go/errors
|
github.com/joyent/triton-go/errors
|
||||||
github.com/joyent/triton-go/storage
|
github.com/joyent/triton-go/storage
|
||||||
|
github.com/joyent/triton-go/client
|
||||||
# github.com/json-iterator/go v1.1.5
|
# github.com/json-iterator/go v1.1.5
|
||||||
github.com/json-iterator/go
|
github.com/json-iterator/go
|
||||||
# github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
|
# github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
|
||||||
github.com/kardianos/osext
|
github.com/kardianos/osext
|
||||||
# github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba
|
# github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba
|
||||||
github.com/keybase/go-crypto/brainpool
|
|
||||||
github.com/keybase/go-crypto/cast5
|
|
||||||
github.com/keybase/go-crypto/openpgp
|
github.com/keybase/go-crypto/openpgp
|
||||||
github.com/keybase/go-crypto/openpgp/armor
|
|
||||||
github.com/keybase/go-crypto/openpgp/elgamal
|
|
||||||
github.com/keybase/go-crypto/openpgp/errors
|
|
||||||
github.com/keybase/go-crypto/openpgp/packet
|
github.com/keybase/go-crypto/openpgp/packet
|
||||||
|
github.com/keybase/go-crypto/openpgp/armor
|
||||||
|
github.com/keybase/go-crypto/openpgp/errors
|
||||||
github.com/keybase/go-crypto/openpgp/s2k
|
github.com/keybase/go-crypto/openpgp/s2k
|
||||||
github.com/keybase/go-crypto/rsa
|
github.com/keybase/go-crypto/rsa
|
||||||
|
github.com/keybase/go-crypto/brainpool
|
||||||
|
github.com/keybase/go-crypto/cast5
|
||||||
|
github.com/keybase/go-crypto/openpgp/elgamal
|
||||||
# github.com/lib/pq v1.0.0
|
# github.com/lib/pq v1.0.0
|
||||||
github.com/lib/pq
|
github.com/lib/pq
|
||||||
github.com/lib/pq/oid
|
github.com/lib/pq/oid
|
||||||
|
@ -451,8 +455,8 @@ github.com/pkg/browser
|
||||||
github.com/pkg/errors
|
github.com/pkg/errors
|
||||||
# github.com/posener/complete v1.2.1
|
# github.com/posener/complete v1.2.1
|
||||||
github.com/posener/complete
|
github.com/posener/complete
|
||||||
github.com/posener/complete/cmd
|
|
||||||
github.com/posener/complete/cmd/install
|
github.com/posener/complete/cmd/install
|
||||||
|
github.com/posener/complete/cmd
|
||||||
github.com/posener/complete/match
|
github.com/posener/complete/match
|
||||||
# github.com/satori/go.uuid v1.2.0
|
# github.com/satori/go.uuid v1.2.0
|
||||||
github.com/satori/go.uuid
|
github.com/satori/go.uuid
|
||||||
|
@ -467,9 +471,9 @@ github.com/terraform-providers/terraform-provider-openstack/openstack
|
||||||
github.com/ugorji/go/codec
|
github.com/ugorji/go/codec
|
||||||
# github.com/ulikunitz/xz v0.5.5
|
# github.com/ulikunitz/xz v0.5.5
|
||||||
github.com/ulikunitz/xz
|
github.com/ulikunitz/xz
|
||||||
github.com/ulikunitz/xz/internal/hash
|
|
||||||
github.com/ulikunitz/xz/internal/xlog
|
github.com/ulikunitz/xz/internal/xlog
|
||||||
github.com/ulikunitz/xz/lzma
|
github.com/ulikunitz/xz/lzma
|
||||||
|
github.com/ulikunitz/xz/internal/hash
|
||||||
# github.com/vmihailenco/msgpack v4.0.1+incompatible
|
# github.com/vmihailenco/msgpack v4.0.1+incompatible
|
||||||
github.com/vmihailenco/msgpack
|
github.com/vmihailenco/msgpack
|
||||||
github.com/vmihailenco/msgpack/codes
|
github.com/vmihailenco/msgpack/codes
|
||||||
|
@ -479,82 +483,87 @@ github.com/xanzy/ssh-agent
|
||||||
github.com/xlab/treeprint
|
github.com/xlab/treeprint
|
||||||
# github.com/zclconf/go-cty v1.1.0
|
# github.com/zclconf/go-cty v1.1.0
|
||||||
github.com/zclconf/go-cty/cty
|
github.com/zclconf/go-cty/cty
|
||||||
|
github.com/zclconf/go-cty/cty/gocty
|
||||||
github.com/zclconf/go-cty/cty/convert
|
github.com/zclconf/go-cty/cty/convert
|
||||||
|
github.com/zclconf/go-cty/cty/json
|
||||||
github.com/zclconf/go-cty/cty/function
|
github.com/zclconf/go-cty/cty/function
|
||||||
github.com/zclconf/go-cty/cty/function/stdlib
|
github.com/zclconf/go-cty/cty/function/stdlib
|
||||||
github.com/zclconf/go-cty/cty/gocty
|
|
||||||
github.com/zclconf/go-cty/cty/json
|
|
||||||
github.com/zclconf/go-cty/cty/msgpack
|
github.com/zclconf/go-cty/cty/msgpack
|
||||||
github.com/zclconf/go-cty/cty/set
|
github.com/zclconf/go-cty/cty/set
|
||||||
# github.com/zclconf/go-cty-yaml v1.0.1
|
# github.com/zclconf/go-cty-yaml v1.0.1
|
||||||
github.com/zclconf/go-cty-yaml
|
github.com/zclconf/go-cty-yaml
|
||||||
# go.opencensus.io v0.22.0
|
# go.opencensus.io v0.22.0
|
||||||
go.opencensus.io
|
go.opencensus.io/trace
|
||||||
go.opencensus.io/internal
|
|
||||||
go.opencensus.io/internal/tagencoding
|
|
||||||
go.opencensus.io/metric/metricdata
|
|
||||||
go.opencensus.io/metric/metricproducer
|
|
||||||
go.opencensus.io/plugin/ochttp
|
go.opencensus.io/plugin/ochttp
|
||||||
|
go.opencensus.io/internal
|
||||||
|
go.opencensus.io/trace/internal
|
||||||
|
go.opencensus.io/trace/tracestate
|
||||||
go.opencensus.io/plugin/ochttp/propagation/b3
|
go.opencensus.io/plugin/ochttp/propagation/b3
|
||||||
go.opencensus.io/resource
|
|
||||||
go.opencensus.io/stats
|
go.opencensus.io/stats
|
||||||
go.opencensus.io/stats/internal
|
|
||||||
go.opencensus.io/stats/view
|
go.opencensus.io/stats/view
|
||||||
go.opencensus.io/tag
|
go.opencensus.io/tag
|
||||||
go.opencensus.io/trace
|
|
||||||
go.opencensus.io/trace/internal
|
|
||||||
go.opencensus.io/trace/propagation
|
go.opencensus.io/trace/propagation
|
||||||
go.opencensus.io/trace/tracestate
|
go.opencensus.io
|
||||||
|
go.opencensus.io/metric/metricdata
|
||||||
|
go.opencensus.io/stats/internal
|
||||||
|
go.opencensus.io/internal/tagencoding
|
||||||
|
go.opencensus.io/metric/metricproducer
|
||||||
|
go.opencensus.io/resource
|
||||||
# golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
|
# golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
|
||||||
golang.org/x/crypto/bcrypt
|
|
||||||
golang.org/x/crypto/blowfish
|
|
||||||
golang.org/x/crypto/cast5
|
|
||||||
golang.org/x/crypto/curve25519
|
|
||||||
golang.org/x/crypto/ed25519
|
|
||||||
golang.org/x/crypto/ed25519/internal/edwards25519
|
|
||||||
golang.org/x/crypto/internal/chacha20
|
|
||||||
golang.org/x/crypto/internal/subtle
|
|
||||||
golang.org/x/crypto/md4
|
|
||||||
golang.org/x/crypto/openpgp
|
|
||||||
golang.org/x/crypto/openpgp/armor
|
|
||||||
golang.org/x/crypto/openpgp/elgamal
|
|
||||||
golang.org/x/crypto/openpgp/errors
|
|
||||||
golang.org/x/crypto/openpgp/packet
|
|
||||||
golang.org/x/crypto/openpgp/s2k
|
|
||||||
golang.org/x/crypto/pkcs12
|
|
||||||
golang.org/x/crypto/pkcs12/internal/rc2
|
|
||||||
golang.org/x/crypto/poly1305
|
|
||||||
golang.org/x/crypto/ssh
|
golang.org/x/crypto/ssh
|
||||||
golang.org/x/crypto/ssh/agent
|
golang.org/x/crypto/ssh/agent
|
||||||
golang.org/x/crypto/ssh/knownhosts
|
golang.org/x/crypto/ssh/knownhosts
|
||||||
# golang.org/x/net v0.0.0-20190620200207-3b0461eec859
|
golang.org/x/crypto/bcrypt
|
||||||
|
golang.org/x/crypto/openpgp
|
||||||
|
golang.org/x/crypto/pkcs12
|
||||||
|
golang.org/x/crypto/curve25519
|
||||||
|
golang.org/x/crypto/ed25519
|
||||||
|
golang.org/x/crypto/internal/chacha20
|
||||||
|
golang.org/x/crypto/poly1305
|
||||||
|
golang.org/x/crypto/blowfish
|
||||||
|
golang.org/x/crypto/openpgp/armor
|
||||||
|
golang.org/x/crypto/openpgp/errors
|
||||||
|
golang.org/x/crypto/openpgp/packet
|
||||||
|
golang.org/x/crypto/openpgp/s2k
|
||||||
|
golang.org/x/crypto/pkcs12/internal/rc2
|
||||||
|
golang.org/x/crypto/ed25519/internal/edwards25519
|
||||||
|
golang.org/x/crypto/internal/subtle
|
||||||
|
golang.org/x/crypto/md4
|
||||||
|
golang.org/x/crypto/cast5
|
||||||
|
golang.org/x/crypto/openpgp/elgamal
|
||||||
|
# golang.org/x/net v0.0.0-20191009170851-d66e71096ffb
|
||||||
golang.org/x/net/context
|
golang.org/x/net/context
|
||||||
|
golang.org/x/net/idna
|
||||||
|
golang.org/x/net/trace
|
||||||
golang.org/x/net/context/ctxhttp
|
golang.org/x/net/context/ctxhttp
|
||||||
golang.org/x/net/html
|
|
||||||
golang.org/x/net/html/atom
|
|
||||||
golang.org/x/net/html/charset
|
golang.org/x/net/html/charset
|
||||||
golang.org/x/net/http/httpguts
|
golang.org/x/net/internal/timeseries
|
||||||
golang.org/x/net/http2
|
golang.org/x/net/http2
|
||||||
golang.org/x/net/http2/hpack
|
golang.org/x/net/http2/hpack
|
||||||
golang.org/x/net/idna
|
golang.org/x/net/html
|
||||||
golang.org/x/net/internal/timeseries
|
golang.org/x/net/http/httpguts
|
||||||
golang.org/x/net/trace
|
golang.org/x/net/html/atom
|
||||||
# golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
|
# golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
|
||||||
golang.org/x/oauth2
|
golang.org/x/oauth2
|
||||||
golang.org/x/oauth2/google
|
golang.org/x/oauth2/jwt
|
||||||
golang.org/x/oauth2/internal
|
golang.org/x/oauth2/internal
|
||||||
golang.org/x/oauth2/jws
|
golang.org/x/oauth2/jws
|
||||||
golang.org/x/oauth2/jwt
|
golang.org/x/oauth2/google
|
||||||
# golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa
|
# golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa
|
||||||
golang.org/x/sys/cpu
|
|
||||||
golang.org/x/sys/unix
|
|
||||||
golang.org/x/sys/windows
|
golang.org/x/sys/windows
|
||||||
|
golang.org/x/sys/unix
|
||||||
|
golang.org/x/sys/cpu
|
||||||
# golang.org/x/text v0.3.2
|
# golang.org/x/text v0.3.2
|
||||||
|
golang.org/x/text/unicode/norm
|
||||||
|
golang.org/x/text/transform
|
||||||
|
golang.org/x/text/secure/bidirule
|
||||||
|
golang.org/x/text/unicode/bidi
|
||||||
golang.org/x/text/encoding
|
golang.org/x/text/encoding
|
||||||
golang.org/x/text/encoding/charmap
|
golang.org/x/text/encoding/charmap
|
||||||
golang.org/x/text/encoding/htmlindex
|
golang.org/x/text/encoding/htmlindex
|
||||||
golang.org/x/text/encoding/internal
|
golang.org/x/text/language
|
||||||
golang.org/x/text/encoding/internal/identifier
|
golang.org/x/text/encoding/internal/identifier
|
||||||
|
golang.org/x/text/encoding/internal
|
||||||
golang.org/x/text/encoding/japanese
|
golang.org/x/text/encoding/japanese
|
||||||
golang.org/x/text/encoding/korean
|
golang.org/x/text/encoding/korean
|
||||||
golang.org/x/text/encoding/simplifiedchinese
|
golang.org/x/text/encoding/simplifiedchinese
|
||||||
|
@ -562,62 +571,58 @@ golang.org/x/text/encoding/traditionalchinese
|
||||||
golang.org/x/text/encoding/unicode
|
golang.org/x/text/encoding/unicode
|
||||||
golang.org/x/text/internal/language
|
golang.org/x/text/internal/language
|
||||||
golang.org/x/text/internal/language/compact
|
golang.org/x/text/internal/language/compact
|
||||||
golang.org/x/text/internal/tag
|
|
||||||
golang.org/x/text/internal/utf8internal
|
golang.org/x/text/internal/utf8internal
|
||||||
golang.org/x/text/language
|
|
||||||
golang.org/x/text/runes
|
golang.org/x/text/runes
|
||||||
golang.org/x/text/secure/bidirule
|
golang.org/x/text/internal/tag
|
||||||
golang.org/x/text/transform
|
|
||||||
golang.org/x/text/unicode/bidi
|
|
||||||
golang.org/x/text/unicode/norm
|
|
||||||
# golang.org/x/time v0.0.0-20190308202827-9d24e82272b4
|
# golang.org/x/time v0.0.0-20190308202827-9d24e82272b4
|
||||||
golang.org/x/time/rate
|
golang.org/x/time/rate
|
||||||
# google.golang.org/api v0.9.0
|
# google.golang.org/api v0.9.0
|
||||||
google.golang.org/api/gensupport
|
|
||||||
google.golang.org/api/googleapi
|
|
||||||
google.golang.org/api/googleapi/internal/uritemplates
|
|
||||||
google.golang.org/api/googleapi/transport
|
|
||||||
google.golang.org/api/internal
|
|
||||||
google.golang.org/api/iterator
|
google.golang.org/api/iterator
|
||||||
google.golang.org/api/option
|
google.golang.org/api/option
|
||||||
|
google.golang.org/api/googleapi
|
||||||
google.golang.org/api/storage/v1
|
google.golang.org/api/storage/v1
|
||||||
google.golang.org/api/transport/http
|
google.golang.org/api/transport/http
|
||||||
|
google.golang.org/api/internal
|
||||||
|
google.golang.org/api/googleapi/internal/uritemplates
|
||||||
|
google.golang.org/api/gensupport
|
||||||
|
google.golang.org/api/googleapi/transport
|
||||||
google.golang.org/api/transport/http/internal/propagation
|
google.golang.org/api/transport/http/internal/propagation
|
||||||
# google.golang.org/appengine v1.6.1
|
# google.golang.org/appengine v1.6.1
|
||||||
|
google.golang.org/appengine/urlfetch
|
||||||
google.golang.org/appengine
|
google.golang.org/appengine
|
||||||
google.golang.org/appengine/datastore
|
google.golang.org/appengine/datastore
|
||||||
google.golang.org/appengine/datastore/internal/cloudkey
|
|
||||||
google.golang.org/appengine/datastore/internal/cloudpb
|
|
||||||
google.golang.org/appengine/internal
|
google.golang.org/appengine/internal
|
||||||
google.golang.org/appengine/internal/app_identity
|
|
||||||
google.golang.org/appengine/internal/base
|
|
||||||
google.golang.org/appengine/internal/datastore
|
|
||||||
google.golang.org/appengine/internal/log
|
|
||||||
google.golang.org/appengine/internal/modules
|
|
||||||
google.golang.org/appengine/internal/remote_api
|
|
||||||
google.golang.org/appengine/internal/urlfetch
|
google.golang.org/appengine/internal/urlfetch
|
||||||
google.golang.org/appengine/urlfetch
|
google.golang.org/appengine/internal/app_identity
|
||||||
|
google.golang.org/appengine/internal/modules
|
||||||
|
google.golang.org/appengine/datastore/internal/cloudkey
|
||||||
|
google.golang.org/appengine/internal/datastore
|
||||||
|
google.golang.org/appengine/internal/base
|
||||||
|
google.golang.org/appengine/internal/log
|
||||||
|
google.golang.org/appengine/internal/remote_api
|
||||||
|
google.golang.org/appengine/datastore/internal/cloudpb
|
||||||
# google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55
|
# google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55
|
||||||
google.golang.org/genproto/googleapis/api/annotations
|
|
||||||
google.golang.org/genproto/googleapis/iam/v1
|
google.golang.org/genproto/googleapis/iam/v1
|
||||||
google.golang.org/genproto/googleapis/rpc/code
|
|
||||||
google.golang.org/genproto/googleapis/rpc/status
|
google.golang.org/genproto/googleapis/rpc/status
|
||||||
|
google.golang.org/genproto/googleapis/rpc/code
|
||||||
|
google.golang.org/genproto/googleapis/api/annotations
|
||||||
google.golang.org/genproto/googleapis/type/expr
|
google.golang.org/genproto/googleapis/type/expr
|
||||||
# google.golang.org/grpc v1.21.1
|
# google.golang.org/grpc v1.21.1
|
||||||
google.golang.org/grpc
|
google.golang.org/grpc
|
||||||
google.golang.org/grpc/balancer
|
google.golang.org/grpc/test/bufconn
|
||||||
google.golang.org/grpc/balancer/base
|
|
||||||
google.golang.org/grpc/balancer/roundrobin
|
|
||||||
google.golang.org/grpc/binarylog/grpc_binarylog_v1
|
|
||||||
google.golang.org/grpc/codes
|
google.golang.org/grpc/codes
|
||||||
google.golang.org/grpc/connectivity
|
google.golang.org/grpc/status
|
||||||
|
google.golang.org/grpc/metadata
|
||||||
google.golang.org/grpc/credentials
|
google.golang.org/grpc/credentials
|
||||||
google.golang.org/grpc/credentials/internal
|
|
||||||
google.golang.org/grpc/encoding
|
|
||||||
google.golang.org/grpc/encoding/proto
|
|
||||||
google.golang.org/grpc/grpclog
|
|
||||||
google.golang.org/grpc/health
|
google.golang.org/grpc/health
|
||||||
google.golang.org/grpc/health/grpc_health_v1
|
google.golang.org/grpc/health/grpc_health_v1
|
||||||
|
google.golang.org/grpc/grpclog
|
||||||
|
google.golang.org/grpc/keepalive
|
||||||
|
google.golang.org/grpc/balancer
|
||||||
|
google.golang.org/grpc/balancer/roundrobin
|
||||||
|
google.golang.org/grpc/connectivity
|
||||||
|
google.golang.org/grpc/encoding
|
||||||
|
google.golang.org/grpc/encoding/proto
|
||||||
google.golang.org/grpc/internal
|
google.golang.org/grpc/internal
|
||||||
google.golang.org/grpc/internal/backoff
|
google.golang.org/grpc/internal/backoff
|
||||||
google.golang.org/grpc/internal/balancerload
|
google.golang.org/grpc/internal/balancerload
|
||||||
|
@ -626,19 +631,18 @@ google.golang.org/grpc/internal/channelz
|
||||||
google.golang.org/grpc/internal/envconfig
|
google.golang.org/grpc/internal/envconfig
|
||||||
google.golang.org/grpc/internal/grpcrand
|
google.golang.org/grpc/internal/grpcrand
|
||||||
google.golang.org/grpc/internal/grpcsync
|
google.golang.org/grpc/internal/grpcsync
|
||||||
google.golang.org/grpc/internal/syscall
|
|
||||||
google.golang.org/grpc/internal/transport
|
google.golang.org/grpc/internal/transport
|
||||||
google.golang.org/grpc/keepalive
|
|
||||||
google.golang.org/grpc/metadata
|
|
||||||
google.golang.org/grpc/naming
|
google.golang.org/grpc/naming
|
||||||
google.golang.org/grpc/peer
|
google.golang.org/grpc/peer
|
||||||
google.golang.org/grpc/resolver
|
google.golang.org/grpc/resolver
|
||||||
google.golang.org/grpc/resolver/dns
|
google.golang.org/grpc/resolver/dns
|
||||||
google.golang.org/grpc/resolver/passthrough
|
google.golang.org/grpc/resolver/passthrough
|
||||||
google.golang.org/grpc/stats
|
google.golang.org/grpc/stats
|
||||||
google.golang.org/grpc/status
|
|
||||||
google.golang.org/grpc/tap
|
google.golang.org/grpc/tap
|
||||||
google.golang.org/grpc/test/bufconn
|
google.golang.org/grpc/credentials/internal
|
||||||
|
google.golang.org/grpc/balancer/base
|
||||||
|
google.golang.org/grpc/binarylog/grpc_binarylog_v1
|
||||||
|
google.golang.org/grpc/internal/syscall
|
||||||
# gopkg.in/ini.v1 v1.42.0
|
# gopkg.in/ini.v1 v1.42.0
|
||||||
gopkg.in/ini.v1
|
gopkg.in/ini.v1
|
||||||
# gopkg.in/yaml.v2 v2.2.2
|
# gopkg.in/yaml.v2 v2.2.2
|