diff --git a/.travis.yml b/.travis.yml index 2ab2fc2b412..951c154292e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,7 @@ addons: matrix: include: - - go: 1.12.x + - go: 1.15.x services: - redis-server diff --git a/apidef/api_definitions.go b/apidef/api_definitions.go index 650dc79b469..4596bc614fd 100644 --- a/apidef/api_definitions.go +++ b/apidef/api_definitions.go @@ -520,6 +520,7 @@ type ProxyConfig struct { SSLInsecureSkipVerify bool `bson:"ssl_insecure_skip_verify" json:"ssl_insecure_skip_verify"` SSLCipherSuites []string `bson:"ssl_ciphers" json:"ssl_ciphers"` SSLMinVersion uint16 `bson:"ssl_min_version" json:"ssl_min_version"` + SSLMaxVersion uint16 `bson:"ssl_max_version" json:"ssl_max_version"` SSLForceCommonNameCheck bool `json:"ssl_force_common_name_check"` ProxyURL string `bson:"proxy_url" json:"proxy_url"` } `bson:"transport" json:"transport"` diff --git a/apidef/schema.go b/apidef/schema.go index c68efd367ae..7ed40a372b7 100644 --- a/apidef/schema.go +++ b/apidef/schema.go @@ -245,9 +245,9 @@ const Schema = `{ } } }, - "auth_configs":{ - "type": ["object", "null"] - }, + "auth_configs":{ + "type": ["object", "null"] + }, "definition": { "type": ["object", "null"], "id": "http://jsonschema.net/definition", @@ -320,6 +320,9 @@ const Schema = `{ "ssl_min_version": { "type": "number" }, + "ssl_max_version": { + "type": "number" + }, "proxy_url": { "type": "string" }, @@ -431,88 +434,88 @@ const Schema = `{ ] }, "graphql": { - "type": ["object", "null"], - "properties": { - "enabled": { - "type": "boolean" - }, - "execution_mode": { - "type": "string", - "enum": [ - "proxyOnly", - "executionEngine", - "" - ] - }, - "schema": { - "type": "string" - }, - "last_schema_update": { - "type": "string", - "format": "date-time" - }, - "type_field_configurations": { - "type": ["array", "null"], - "properties": { - "type_name": { - "type": "string" - }, - "field_name": { - "type": "string" - }, - "mapping": { - "type": ["object", "null"], - "properties": { - "disabled": { - "type": "boolean" - }, - "path": { - "type": "string" - } - }, - "required": [ - "disabled" - ] - }, - "data_source": { - "type": ["object", "null"], - "properties": { - "kind": { - "type": "boolean" - }, - "data_source_config": { - "type": ["object", "null"] - } - }, - "required": [ - "kind" - ] - } - }, - "required": [ - "type_name", - "field_name" - ] - }, - "playground": { - "type": ["object", "null"], - "properties": { - "enabled": { - "type": "boolean" - }, - "path": { - "type": "string" - } - }, - "required": [ - "enabled" - ] - } - }, - "required": [ - "enabled" - ] - } + "type": ["object", "null"], + "properties": { + "enabled": { + "type": "boolean" + }, + "execution_mode": { + "type": "string", + "enum": [ + "proxyOnly", + "executionEngine", + "" + ] + }, + "schema": { + "type": "string" + }, + "last_schema_update": { + "type": "string", + "format": "date-time" + }, + "type_field_configurations": { + "type": ["array", "null"], + "properties": { + "type_name": { + "type": "string" + }, + "field_name": { + "type": "string" + }, + "mapping": { + "type": ["object", "null"], + "properties": { + "disabled": { + "type": "boolean" + }, + "path": { + "type": "string" + } + }, + "required": [ + "disabled" + ] + }, + "data_source": { + "type": ["object", "null"], + "properties": { + "kind": { + "type": "boolean" + }, + "data_source_config": { + "type": ["object", "null"] + } + }, + "required": [ + "kind" + ] + } + }, + "required": [ + "type_name", + "field_name" + ] + }, + "playground": { + "type": ["object", 
"null"], + "properties": { + "enabled": { + "type": "boolean" + }, + "path": { + "type": "string" + } + }, + "required": [ + "enabled" + ] + } + }, + "required": [ + "enabled" + ] + } }, "required": [ "name", diff --git a/bin/integration_build.sh b/bin/integration_build.sh index 75bc1d99312..ba0540730c0 100755 --- a/bin/integration_build.sh +++ b/bin/integration_build.sh @@ -49,7 +49,7 @@ cp tyk.conf.example ${bdir}/tyk.conf cp -R coprocess ${bdir} echo "Building Tyk binaries" -go build -tags 'goplugin' -mod=vendor +go build -tags 'goplugin' mv tyk ${bdir} echo "Making tarball" diff --git a/certs/manager.go b/certs/manager.go index a7a8c9e1043..81b542f6bb5 100644 --- a/certs/manager.go +++ b/certs/manager.go @@ -345,22 +345,18 @@ func (c *CertificateManager) List(certIDs []string, mode CertificateType) (out [ continue } - if isSHA256(id) { - var val string - val, err = c.storage.GetKey("raw-" + id) - if err != nil { - c.logger.Warn("Can't retrieve certificate from Redis:", id, err) - out = append(out, nil) - continue - } - rawCert = []byte(val) - } else { + var val string + val, err = c.storage.GetKey("raw-" + id) + if err != nil { + // Try read from file rawCert, err = ioutil.ReadFile(id) if err != nil { - c.logger.Error("Error while reading certificate from file:", id, err) + c.logger.Warn("Can't retrieve certificate:", id, err) out = append(out, nil) continue } + } else { + rawCert = []byte(val) } cert, err = ParsePEMCertificate(rawCert, c.secret) @@ -476,6 +472,7 @@ func (c *CertificateManager) ListAllIds(prefix string) (out []string) { } keys := c.storage.GetKeys("raw-" + prefix + "*") + for _, key := range keys { if prefix != "" { c.storage.AppendToSet(indexKey, key) diff --git a/certs/manager_test.go b/certs/manager_test.go index 8a8ca0c5d21..fde5178cbe8 100644 --- a/certs/manager_test.go +++ b/certs/manager_test.go @@ -235,7 +235,7 @@ func TestCertificateStorage(t *testing.T) { t.Run("File certificates", func(t *testing.T) { certs := m.List([]string{certPath, "wrong"}, CertificatePublic) if len(certs) != 2 { - t.Fatal("Should contain 1 cert", len(certs)) + t.Fatal("Should contain 2 cert", len(certs)) } if certs[1] != nil { diff --git a/cli/linter/schema.json b/cli/linter/schema.json index b14b406ed8c..526f45f9797 100644 --- a/cli/linter/schema.json +++ b/cli/linter/schema.json @@ -506,6 +506,9 @@ "min_version": { "type": "integer" }, + "max_version": { + "type": "integer" + }, "override_defaults": { "type": "boolean" }, @@ -784,6 +787,9 @@ "proxy_ssl_min_version": { "type": "integer" }, + "proxy_ssl_max_version": { + "type": "integer" + }, "proxy_ssl_ciphers": { "type": [ "array", diff --git a/config/config.go b/config/config.go index 970ec816f3d..5ef30908add 100644 --- a/config/config.go +++ b/config/config.go @@ -190,6 +190,7 @@ type HttpServerOptionsConfig struct { SSLCertificates []string `json:"ssl_certificates"` ServerName string `json:"server_name"` MinVersion uint16 `json:"min_version"` + MaxVersion uint16 `json:"max_version"` FlushInterval int `json:"flush_interval"` SkipURLCleaning bool `json:"skip_url_cleaning"` SkipTargetPathEscaping bool `json:"skip_target_path_escaping"` @@ -386,6 +387,7 @@ type Config struct { ProxySSLInsecureSkipVerify bool `json:"proxy_ssl_insecure_skip_verify"` ProxyEnableHttp2 bool `json:"proxy_enable_http2"` ProxySSLMinVersion uint16 `json:"proxy_ssl_min_version"` + ProxySSLMaxVersion uint16 `json:"proxy_ssl_max_version"` ProxySSLCipherSuites []string `json:"proxy_ssl_ciphers"` ProxyDefaultTimeout float64 `json:"proxy_default_timeout"` 
ProxySSLDisableRenegotiation bool `json:"proxy_ssl_disable_renegotiation"` diff --git a/gateway/api_loader.go b/gateway/api_loader.go index 0f52acfea60..ff547896854 100644 --- a/gateway/api_loader.go +++ b/gateway/api_loader.go @@ -1,6 +1,7 @@ package gateway import ( + "crypto/tls" "fmt" "net/http" "net/url" @@ -121,6 +122,14 @@ func processSpec(spec *APISpec, apisByListen map[string]int, spec.Proxy.TargetURL = strings.Replace(spec.Proxy.TargetURL, "h2c://", "http://", 1) } + if spec.Proxy.Transport.SSLMaxVersion > 0 { + spec.Proxy.Transport.SSLMaxVersion = tls.VersionTLS12 + } + + if spec.Proxy.Transport.SSLMinVersion > spec.Proxy.Transport.SSLMaxVersion { + spec.Proxy.Transport.SSLMaxVersion = spec.Proxy.Transport.SSLMinVersion + } + if len(spec.TagHeaders) > 0 { // Ensure all headers marked for tagging are lowercase lowerCaseHeaders := make([]string, len(spec.TagHeaders)) diff --git a/gateway/batch_requests.go b/gateway/batch_requests.go index 55a3c8ad4ca..aeda76c0b14 100644 --- a/gateway/batch_requests.go +++ b/gateway/batch_requests.go @@ -48,6 +48,15 @@ func (b *BatchRequestHandler) doRequest(req *http.Request, relURL string) BatchR } tr.TLSClientConfig.InsecureSkipVerify = config.Global().ProxySSLInsecureSkipVerify + + if config.Global().ProxySSLMaxVersion > 0 { + tr.TLSClientConfig.MaxVersion = config.Global().ProxySSLMaxVersion + } + + if b.API != nil && b.API.Proxy.Transport.SSLMaxVersion > 0 { + tr.TLSClientConfig.MaxVersion = b.API.Proxy.Transport.SSLMaxVersion + } + tr.DialTLS = customDialTLSCheck(b.API, tr.TLSClientConfig) tr.Proxy = proxyFromAPI(b.API) diff --git a/gateway/batch_requests_test.go b/gateway/batch_requests_test.go index 320ada7f5ed..783dc0b4e19 100644 --- a/gateway/batch_requests_test.go +++ b/gateway/batch_requests_test.go @@ -12,6 +12,7 @@ import ( "strings" "sync/atomic" "testing" + "time" "github.com/TykTechnologies/tyk/apidef" "github.com/TykTechnologies/tyk/config" @@ -77,7 +78,7 @@ func TestBatch(t *testing.T) { } } -var virtBatchTest = `function batchTest(request, session, config) { +const virtBatchTest = `function batchTest(request, session, config) { // Set up a response object var response = { Body: "", @@ -133,6 +134,7 @@ func TestVirtualEndpointBatch(t *testing.T) { ClientAuth: tls.RequireAndVerifyClientCert, ClientCAs: pool, InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, } upstream.StartTLS() @@ -141,7 +143,7 @@ func TestVirtualEndpointBatch(t *testing.T) { clientCertID, _ := CertificateManager.Add(combinedClientPEM, "") defer CertificateManager.Delete(clientCertID, "") - virtBatchTest = strings.Replace(virtBatchTest, "{upstream_URL}", upstream.URL, 2) + js := strings.Replace(virtBatchTest, "{upstream_URL}", upstream.URL, 2) defer upstream.Close() upstreamHost := strings.TrimPrefix(upstream.URL, "https://") @@ -160,7 +162,7 @@ func TestVirtualEndpointBatch(t *testing.T) { virtualMeta := apidef.VirtualMeta{ ResponseFunctionName: "batchTest", FunctionSourceType: "blob", - FunctionSourceURI: base64.StdEncoding.EncodeToString([]byte(virtBatchTest)), + FunctionSourceURI: base64.StdEncoding.EncodeToString([]byte(js)), Path: "/virt", Method: "GET", } @@ -213,7 +215,7 @@ func TestBatchIgnoreCanonicalHeaderKey(t *testing.T) { }() upstream := "http://" + l.Addr().String() - virtBatchTest = strings.Replace(virtBatchTest, "{upstream_URL}", upstream, 2) + js := strings.Replace(virtBatchTest, "{upstream_URL}", upstream, 2) c := config.Global() c.IgnoreCanonicalMIMEHeaderKey = true config.SetGlobal(c) @@ -225,7 +227,7 @@ func 
TestBatchIgnoreCanonicalHeaderKey(t *testing.T) { virtualMeta := apidef.VirtualMeta{ ResponseFunctionName: "batchTest", FunctionSourceType: "blob", - FunctionSourceURI: base64.StdEncoding.EncodeToString([]byte(virtBatchTest)), + FunctionSourceURI: base64.StdEncoding.EncodeToString([]byte(js)), Path: "/virt", Method: "GET", } @@ -236,6 +238,10 @@ func TestBatchIgnoreCanonicalHeaderKey(t *testing.T) { } }) }) + + // Let the server start + time.Sleep(500 * time.Millisecond) + ts.Run(t, test.TestCase{Path: "/virt", Code: 202}) got := header.Load().(string) if got != NonCanonicalHeaderKey { diff --git a/gateway/cert_go1.10_test.go b/gateway/cert_go1.10_test.go index b35b75d930e..3f297708c17 100644 --- a/gateway/cert_go1.10_test.go +++ b/gateway/cert_go1.10_test.go @@ -48,6 +48,7 @@ func TestPublicKeyPinning(t *testing.T) { upstream.TLS = &tls.Config{ InsecureSkipVerify: true, Certificates: []tls.Certificate{serverCert}, + MaxVersion: tls.VersionTLS12, } upstream.StartTLS() @@ -106,6 +107,7 @@ func TestPublicKeyPinning(t *testing.T) { _, _, _, proxyCert := genServerCertificate() proxy := initProxy("https", &tls.Config{ Certificates: []tls.Certificate{proxyCert}, + MaxVersion: tls.VersionTLS12, }) globalConf := config.Global() @@ -145,6 +147,7 @@ func TestPublicKeyPinning(t *testing.T) { upstream.TLS = &tls.Config{ InsecureSkipVerify: true, Certificates: []tls.Certificate{serverCert}, + MaxVersion: tls.VersionTLS12, } upstream.StartTLS() @@ -162,6 +165,7 @@ func TestPublicKeyPinning(t *testing.T) { proxy := initProxy("http", &tls.Config{ Certificates: []tls.Certificate{proxyCert}, + MaxVersion: tls.VersionTLS12, }) defer proxy.Stop() @@ -199,9 +203,14 @@ func TestPublicKeyPinning(t *testing.T) { } func TestProxyTransport(t *testing.T) { - upstream := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + upstream := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("test")) })) + upstream.TLS = &tls.Config{ + MaxVersion: tls.VersionTLS12, + } + upstream.StartTLS() + defer upstream.Close() defer ResetTestConfig() @@ -305,6 +314,7 @@ func TestProxyTransport(t *testing.T) { _, _, _, proxyCert := genServerCertificate() proxy := initProxy("https", &tls.Config{ Certificates: []tls.Certificate{proxyCert}, + MaxVersion: tls.VersionTLS12, }) defer proxy.Stop() diff --git a/gateway/cert_test.go b/gateway/cert_test.go index f94983cee7e..393365c0db9 100644 --- a/gateway/cert_test.go +++ b/gateway/cert_test.go @@ -60,7 +60,7 @@ func genServerCertificate() ([]byte, []byte, []byte, tls.Certificate) { } const ( - internalTLSErr = "tls: internal error" + internalTLSErr = "tls: unrecognized name" badcertErr = "tls: bad certificate" certNotMatchErr = "Client TLS certificate is required" ) @@ -724,6 +724,7 @@ func TestUpstreamMutualTLS(t *testing.T) { ClientAuth: tls.RequireAndVerifyClientCert, ClientCAs: pool, InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, } upstream.StartTLS() @@ -783,6 +784,7 @@ func TestSSLForceCommonName(t *testing.T) { upstream.TLS = &tls.Config{ Certificates: []tls.Certificate{cert}, + MaxVersion: tls.VersionTLS12, } upstream.StartTLS() @@ -998,6 +1000,7 @@ func TestAPICertificate(t *testing.T) { client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{ InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, }}} t.Run("Cert set via API", func(t *testing.T) { @@ -1016,7 +1019,7 @@ func TestAPICertificate(t *testing.T) { spec.Proxy.ListenPath = "/" }) - ts.Run(t, 
test.TestCase{ErrorMatch: "tls: internal error"}) + ts.Run(t, test.TestCase{ErrorMatch: internalTLSErr}) }) } @@ -1032,45 +1035,45 @@ func TestCertificateHandlerTLS(t *testing.T) { t.Run("List certificates, empty", func(t *testing.T) { ts.Run(t, test.TestCase{ - Path: "/tyk/certs", Code: 200, AdminAuth: true, BodyMatch: `{"certs":null}`, + Path: "/tyk/certs?org_id=1", Code: 200, AdminAuth: true, BodyMatch: `{"certs":null}`, }) }) t.Run("Should add certificates with and without private keys", func(t *testing.T) { ts.Run(t, []test.TestCase{ // Public Certificate - {Method: "POST", Path: "/tyk/certs", Data: string(clientPEM), AdminAuth: true, Code: 200, BodyMatch: `"id":"` + clientCertID}, + {Method: "POST", Path: "/tyk/certs?org_id=1", Data: string(clientPEM), AdminAuth: true, Code: 200, BodyMatch: `"id":"1` + clientCertID}, // Public + Private - {Method: "POST", Path: "/tyk/certs", Data: string(combinedServerPEM), AdminAuth: true, Code: 200, BodyMatch: `"id":"` + serverCertID}, + {Method: "POST", Path: "/tyk/certs?org_id=1", Data: string(combinedServerPEM), AdminAuth: true, Code: 200, BodyMatch: `"id":"1` + serverCertID}, }...) }) t.Run("List certificates, non empty", func(t *testing.T) { ts.Run(t, []test.TestCase{ - {Method: "GET", Path: "/tyk/certs", AdminAuth: true, Code: 200, BodyMatch: clientCertID}, - {Method: "GET", Path: "/tyk/certs", AdminAuth: true, Code: 200, BodyMatch: serverCertID}, + {Method: "GET", Path: "/tyk/certs?org_id=1", AdminAuth: true, Code: 200, BodyMatch: clientCertID}, + {Method: "GET", Path: "/tyk/certs?org_id=1", AdminAuth: true, Code: 200, BodyMatch: serverCertID}, }...) }) - certMetaTemplate := `{"id":"%s","fingerprint":"%s","has_private":%s` + certMetaTemplate := `{"id":"1%s","fingerprint":"%s","has_private":%s` t.Run("Certificate meta info", func(t *testing.T) { clientCertMeta := fmt.Sprintf(certMetaTemplate, clientCertID, clientCertID, "false") serverCertMeta := fmt.Sprintf(certMetaTemplate, serverCertID, serverCertID, "true") ts.Run(t, []test.TestCase{ - {Method: "GET", Path: "/tyk/certs/" + clientCertID, AdminAuth: true, Code: 200, BodyMatch: clientCertMeta}, - {Method: "GET", Path: "/tyk/certs/" + serverCertID, AdminAuth: true, Code: 200, BodyMatch: serverCertMeta}, - {Method: "GET", Path: "/tyk/certs/" + serverCertID + "," + clientCertID, AdminAuth: true, Code: 200, BodyMatch: `\[` + serverCertMeta}, - {Method: "GET", Path: "/tyk/certs/" + serverCertID + "," + clientCertID, AdminAuth: true, Code: 200, BodyMatch: clientCertMeta}, + {Method: "GET", Path: "/tyk/certs/1" + clientCertID + "?org_id=1", AdminAuth: true, Code: 200, BodyMatch: clientCertMeta}, + {Method: "GET", Path: "/tyk/certs/1" + serverCertID + "?org_id=1", AdminAuth: true, Code: 200, BodyMatch: serverCertMeta}, + {Method: "GET", Path: "/tyk/certs/1" + serverCertID + ",1" + clientCertID + "?org_id=1", AdminAuth: true, Code: 200, BodyMatch: `\[` + serverCertMeta}, + {Method: "GET", Path: "/tyk/certs/1" + serverCertID + ",1" + clientCertID + "?org_id=1", AdminAuth: true, Code: 200, BodyMatch: clientCertMeta}, }...) 
}) t.Run("Certificate removal", func(t *testing.T) { ts.Run(t, []test.TestCase{ - {Method: "DELETE", Path: "/tyk/certs/" + serverCertID, AdminAuth: true, Code: 200}, - {Method: "DELETE", Path: "/tyk/certs/" + clientCertID, AdminAuth: true, Code: 200}, - {Method: "GET", Path: "/tyk/certs", AdminAuth: true, Code: 200, BodyMatch: `{"certs":null}`}, + {Method: "DELETE", Path: "/tyk/certs/1" + serverCertID + "?org_id=1", AdminAuth: true, Code: 200}, + {Method: "DELETE", Path: "/tyk/certs/1" + clientCertID + "?org_id=1", AdminAuth: true, Code: 200}, + {Method: "GET", Path: "/tyk/certs?org_id=1", AdminAuth: true, Code: 200, BodyMatch: `{"certs":null}`}, }...) }) } @@ -1101,6 +1104,7 @@ func TestCipherSuites(t *testing.T) { client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{ CipherSuites: getCipherAliases([]string{"TLS_RSA_WITH_RC4_128_SHA", "TLS_RSA_WITH_3DES_EDE_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA"}), InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, }}} // If there is an internal TLS error it will fail test @@ -1112,6 +1116,7 @@ func TestCipherSuites(t *testing.T) { client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{ CipherSuites: getCipherAliases([]string{"TLS_RSA_WITH_AES_256_CBC_SHA"}), // not matching ciphers InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, }}} ts.Run(t, test.TestCase{Client: client, Path: "/", ErrorMatch: "tls: handshake failure"}) diff --git a/gateway/coprocess_bundle.go b/gateway/coprocess_bundle.go index a61d319f706..7d8ce255806 100644 --- a/gateway/coprocess_bundle.go +++ b/gateway/coprocess_bundle.go @@ -140,7 +140,10 @@ type MockBundleGetter struct { // Get performs an HTTP GET request. func (g *HTTPBundleGetter) Get() ([]byte, error) { tr := &(*http.DefaultTransport.(*http.Transport)) - tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: g.InsecureSkipVerify} + tr.TLSClientConfig = &tls.Config{ + InsecureSkipVerify: g.InsecureSkipVerify, + MaxVersion: tls.VersionTLS12, + } client := &http.Client{Transport: tr} resp, err := client.Get(g.URL) diff --git a/gateway/dashboard_register.go b/gateway/dashboard_register.go index 3eb1f7efc0a..fb5a443a22d 100644 --- a/gateway/dashboard_register.go +++ b/gateway/dashboard_register.go @@ -54,6 +54,8 @@ func initialiseClient() *http.Client { // Setup HTTPS client tlsConfig := &tls.Config{ InsecureSkipVerify: config.Global().HttpServerOptions.SSLInsecureSkipVerify, + MinVersion: config.Global().HttpServerOptions.MinVersion, + MaxVersion: config.Global().HttpServerOptions.MaxVersion, } dashClient.Transport = &http.Transport{TLSClientConfig: tlsConfig} diff --git a/gateway/gateway_test.go b/gateway/gateway_test.go index d445c863cb1..a1740eebb7d 100644 --- a/gateway/gateway_test.go +++ b/gateway/gateway_test.go @@ -2067,7 +2067,9 @@ func TestStripRegex(t *testing.T) { func TestCache_singleErrorResponse(t *testing.T) { ts := StartTest() defer ts.Close() - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte("{}")) + })) defer srv.Close() BuildAndLoadAPI(func(spec *APISpec) { spec.UseKeylessAccess = true diff --git a/gateway/grpc_test.go b/gateway/grpc_test.go index e7b4741d861..a8f22f4d30f 100644 --- a/gateway/grpc_test.go +++ b/gateway/grpc_test.go @@ -469,12 +469,14 @@ func grpcServerCreds(t *testing.T, clientCert *x509.Certificate) []grpc.ServerOp ClientCAs: pool, InsecureSkipVerify: true, Certificates: 
[]tls.Certificate{certificate}, + MaxVersion: tls.VersionTLS12, } pool.AddCert(clientCert) } else { tlsConfig = &tls.Config{ InsecureSkipVerify: true, Certificates: []tls.Certificate{certificate}, + MaxVersion: tls.VersionTLS12, } } @@ -522,7 +524,9 @@ func sayHelloWithGRPCClientH2C(t *testing.T, address string, name string) *pb.He } func grpcCreds(cert *tls.Certificate, caCert []byte, basicAuth bool, token string) []grpc.DialOption { - tlsConfig := &tls.Config{} + tlsConfig := &tls.Config{ + MaxVersion: tls.VersionTLS12, + } if cert != nil { tlsConfig.Certificates = []tls.Certificate{*cert} diff --git a/gateway/handler_error.go b/gateway/handler_error.go index 1776d8d012e..6deda00ac9f 100644 --- a/gateway/handler_error.go +++ b/gateway/handler_error.go @@ -99,6 +99,7 @@ type ErrorHandler struct { func (e *ErrorHandler) HandleError(w http.ResponseWriter, r *http.Request, errMsg string, errCode int, writeResponse bool) { defer e.Base().UpdateRequestSession(r) response := &http.Response{} + if writeResponse { var templateExtension string var contentType string diff --git a/gateway/handler_error_test.go b/gateway/handler_error_test.go index 03b2ea0f691..81f6560ed10 100644 --- a/gateway/handler_error_test.go +++ b/gateway/handler_error_test.go @@ -30,15 +30,19 @@ func TestHandleError_text_xml(t *testing.T) { 500 There was a problem proxying the request - - ` +` ts := StartTest() defer ts.Close() - h := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})) - h.Close() + + // Simulate 500 error + h := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + panic("I should fail!") + })) + defer h.Close() + BuildAndLoadAPI(func(spec *APISpec) { spec.Proxy.ListenPath = "/" - spec.Proxy.TargetURL = ts.URL + spec.Proxy.TargetURL = h.URL }) ts.Run(t, test.TestCase{ Path: "/", diff --git a/gateway/host_checker.go b/gateway/host_checker.go index 087eea191c7..f0b7685fd85 100644 --- a/gateway/host_checker.go +++ b/gateway/host_checker.go @@ -284,6 +284,7 @@ func (h *HostUptimeChecker) CheckHost(toCheck HostData) { HostCheckerClient.Transport = &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: config.Global().ProxySSLInsecureSkipVerify, + MaxVersion: config.Global().ProxySSLMaxVersion, }, } if toCheck.Timeout != 0 { diff --git a/gateway/host_checker_test.go b/gateway/host_checker_test.go index 5926c03bae8..7ca3504ccf6 100644 --- a/gateway/host_checker_test.go +++ b/gateway/host_checker_test.go @@ -296,7 +296,7 @@ func TestTestCheckerTCPHosts_correct_answers(t *testing.T) { ans := &answers{cancel: cancel} setTestMode(false) - hs.Init(1, 1, 0, map[string]HostData{ + hs.Init(1, 1, 1, map[string]HostData{ l.Addr().String(): data, }, ans.cb(), @@ -352,7 +352,7 @@ func TestTestCheckerTCPHosts_correct_answers_proxy_protocol(t *testing.T) { ans := &answers{cancel: cancel} setTestMode(false) - hs.Init(1, 1, 0, map[string]HostData{ + hs.Init(1, 1, 1, map[string]HostData{ l.Addr().String(): data, }, ans.cb(), @@ -401,7 +401,7 @@ func TestTestCheckerTCPHosts_correct_wrong_answers(t *testing.T) { hs := &HostUptimeChecker{} failed := false setTestMode(false) - hs.Init(1, 1, 0, map[string]HostData{ + hs.Init(1, 1, 1, map[string]HostData{ l.Addr().String(): data, }, HostCheckCallBacks{ @@ -421,7 +421,6 @@ func TestTestCheckerTCPHosts_correct_wrong_answers(t *testing.T) { } func TestProxyWhenHostIsDown(t *testing.T) { - g := config.Global() g.UptimeTests.Config.FailureTriggerSampleSize = 1 g.UptimeTests.Config.TimeWait = 5 @@ -448,7 +447,7 @@ 
func TestProxyWhenHostIsDown(t *testing.T) { GlobalHostChecker.checker.sampleTriggerLimit = 1 GlobalHostChecker.checkerMu.Unlock() - tick := time.NewTicker(time.Millisecond) + tick := time.NewTicker(10 * time.Millisecond) defer tick.Stop() x := 0 get := func() { @@ -515,7 +514,7 @@ func TestChecker_triggerSampleLimit(t *testing.T) { ping.Store(0) hs := &HostUptimeChecker{} - hs.Init(1, limit, 0, map[string]HostData{ + hs.Init(1, limit, 1, map[string]HostData{ l.Addr().String(): {CheckURL: "http://" + l.Addr().String()}, }, HostCheckCallBacks{ diff --git a/gateway/mw_graphql.go b/gateway/mw_graphql.go index 57ba40becc1..e61d6e99f2c 100644 --- a/gateway/mw_graphql.go +++ b/gateway/mw_graphql.go @@ -53,7 +53,7 @@ func (m *GraphQLMiddleware) Init() { if m.Spec.GraphQL.ExecutionMode == apidef.GraphQLExecutionModeExecutionEngine { absLogger := abstractlogger.NewLogrusLogger(log, absLoggerLevel(log.Level)) - m.Spec.GraphQLExecutor.Client = &http.Client{} + m.Spec.GraphQLExecutor.Client = &http.Client{Transport: &http.Transport{TLSClientConfig: tlsClientConfig(m.Spec)}} if m.Spec.GraphQL.Version == apidef.GraphQLConfigVersionNone || m.Spec.GraphQL.Version == apidef.GraphQLConfigVersion1 { m.initGraphQLEngineV1(absLogger) diff --git a/gateway/mw_js_plugin.go b/gateway/mw_js_plugin.go index a6088f267a3..3325ca6c909 100644 --- a/gateway/mw_js_plugin.go +++ b/gateway/mw_js_plugin.go @@ -517,7 +517,15 @@ func (j *JSVM) LoadTykJSApi() { } r.Close = true - tr := &http.Transport{TLSClientConfig: &tls.Config{}} + maxSSLVersion := config.Global().ProxySSLMaxVersion + if j.Spec.Proxy.Transport.SSLMaxVersion > 0 { + maxSSLVersion = j.Spec.Proxy.Transport.SSLMaxVersion + } + + tr := &http.Transport{TLSClientConfig: &tls.Config{ + MaxVersion: maxSSLVersion, + }} + if cert := getUpstreamCertificate(r.Host, j.Spec); cert != nil { tr.TLSClientConfig.Certificates = []tls.Certificate{*cert} } diff --git a/gateway/mw_organization_activity_test.go b/gateway/mw_organization_activity_test.go index 95175f403da..3b95c27f14f 100644 --- a/gateway/mw_organization_activity_test.go +++ b/gateway/mw_organization_activity_test.go @@ -36,6 +36,7 @@ func testPrepareProcessRequestQuotaLimit(tb testing.TB, ts *Test, data map[strin BodyMatch: expectBody, }) storage.DisableRedis(false) + ts.Run(tb, test.TestCase{ Path: "/tyk/org/keys/" + orgID + "?reset_quota=1", AdminAuth: true, @@ -62,7 +63,7 @@ func TestProcessRequestLiveQuotaLimit(t *testing.T) { map[string]interface{}{ "quota_max": 10, "quota_remaining": 10, - "quota_renewal_rate": 3, + "quota_renewal_rate": 1, }, ) @@ -73,19 +74,13 @@ func TestProcessRequestLiveQuotaLimit(t *testing.T) { Code: http.StatusOK, }) } - storage.DisableRedis(true) - ts.Run(t, test.TestCase{ - Code: http.StatusOK, - }) - storage.DisableRedis(false) - // next request should fail with 403 as it is out of quota ts.Run(t, test.TestCase{ Code: http.StatusForbidden, }) // wait for renewal - time.Sleep(4 * time.Second) + time.Sleep(2 * time.Second) // next one should be OK ts.Run(t, test.TestCase{ @@ -262,7 +257,7 @@ func TestProcessRequestLiveRedisRollingLimiter(t *testing.T) { } // wait for next time window - time.Sleep(2 * time.Second) + time.Sleep(1 * time.Second) // try to run over rate limit reqNum := 1 @@ -273,6 +268,11 @@ func TestProcessRequestLiveRedisRollingLimiter(t *testing.T) { break } reqNum++ + + if reqNum > 20 { + t.Errorf("Test takes too long to complete") + break + } } if reqNum < 10 { diff --git a/gateway/proxy_muxer.go b/gateway/proxy_muxer.go index c7a80d4b806..656bb1ab905 100644 --- 
a/gateway/proxy_muxer.go +++ b/gateway/proxy_muxer.go @@ -466,6 +466,7 @@ func (m *proxyMux) generateListener(listenPort int, protocol string) (l net.List GetCertificate: dummyGetCertificate, ServerName: httpServerOptions.ServerName, MinVersion: httpServerOptions.MinVersion, + MaxVersion: httpServerOptions.MaxVersion, ClientAuth: tls.NoClientCert, InsecureSkipVerify: httpServerOptions.SSLInsecureSkipVerify, CipherSuites: getCipherAliases(httpServerOptions.Ciphers), diff --git a/gateway/res_handler_transform_test.go b/gateway/res_handler_transform_test.go index bc2c3e7607a..29422a117cb 100644 --- a/gateway/res_handler_transform_test.go +++ b/gateway/res_handler_transform_test.go @@ -3,6 +3,7 @@ package gateway import ( "encoding/base64" "testing" + "time" "github.com/TykTechnologies/tyk/apidef" "github.com/TykTechnologies/tyk/test" @@ -171,8 +172,8 @@ func TestTransformResponse_WithCache(t *testing.T) { createAPI(true) ts.Run(t, []test.TestCase{ - {Path: path, Headers: map[string]string{"Foo": "Bar"}, Code: 200, BodyMatch: `{"foo":"Bar"}`}, // Returns response and caches it - {Path: path, Headers: map[string]string{"Foo": "Bar2"}, Code: 200, BodyMatch: `{"foo":"Bar"}`}, // Returns cached response directly + {Path: path, Headers: map[string]string{"Foo": "Bar"}, Code: 200, BodyMatch: `{"foo":"Bar"}`, Delay: 100 * time.Millisecond}, // Returns response and caches it + {Path: path, Headers: map[string]string{"Foo": "Bar2"}, Code: 200, BodyMatch: `{"foo":"Bar"}`}, // Returns cached response directly }...) } diff --git a/gateway/reverse_proxy.go b/gateway/reverse_proxy.go index f3a741a0db8..b6ba5834313 100644 --- a/gateway/reverse_proxy.go +++ b/gateway/reverse_proxy.go @@ -596,6 +596,14 @@ func tlsClientConfig(s *APISpec) *tls.Config { config.MinVersion = s.Proxy.Transport.SSLMinVersion } + if s.GlobalConfig.ProxySSLMaxVersion > 0 { + config.MaxVersion = s.GlobalConfig.ProxySSLMaxVersion + } + + if s.Proxy.Transport.SSLMaxVersion > 0 { + config.MaxVersion = s.Proxy.Transport.SSLMaxVersion + } + if len(s.GlobalConfig.ProxySSLCipherSuites) > 0 { config.CipherSuites = getCipherAliases(s.GlobalConfig.ProxySSLCipherSuites) } @@ -645,6 +653,14 @@ func httpTransport(timeOut float64, rw http.ResponseWriter, req *http.Request, p transport.TLSClientConfig.MinVersion = p.TykAPISpec.Proxy.Transport.SSLMinVersion } + if p.TykAPISpec.GlobalConfig.ProxySSLMaxVersion > 0 { + transport.TLSClientConfig.MaxVersion = p.TykAPISpec.GlobalConfig.ProxySSLMaxVersion + } + + if p.TykAPISpec.Proxy.Transport.SSLMaxVersion > 0 { + transport.TLSClientConfig.MaxVersion = p.TykAPISpec.Proxy.Transport.SSLMaxVersion + } + if len(p.TykAPISpec.GlobalConfig.ProxySSLCipherSuites) > 0 { transport.TLSClientConfig.CipherSuites = getCipherAliases(p.TykAPISpec.GlobalConfig.ProxySSLCipherSuites) } diff --git a/gateway/rpc_storage_handler.go b/gateway/rpc_storage_handler.go index 7dd2594f48d..fbad2201c2b 100644 --- a/gateway/rpc_storage_handler.go +++ b/gateway/rpc_storage_handler.go @@ -103,6 +103,8 @@ func (r *RPCStorageHandler) Connect() bool { rpcConfig := rpc.Config{ UseSSL: slaveOptions.UseSSL, SSLInsecureSkipVerify: slaveOptions.SSLInsecureSkipVerify, + SSLMinVersion: config.Global().HttpServerOptions.MinVersion, + SSLMaxVersion: config.Global().HttpServerOptions.MaxVersion, ConnectionString: slaveOptions.ConnectionString, RPCKey: slaveOptions.RPCKey, APIKey: slaveOptions.APIKey, diff --git a/gateway/rpc_test.go b/gateway/rpc_test.go index 09a327a7234..a0fe40a3165 100644 --- a/gateway/rpc_test.go +++ b/gateway/rpc_test.go @@ 
-383,16 +383,20 @@ func TestSyncAPISpecsRPC_redis_failure(t *testing.T) { event <- struct{}{} DoReload() } + defer func() { + OnConnect = nil + }() select { case <-event: t.Fatal("OnConnect should only run after reconnection") - case <-time.After(time.Second): + case <-time.After(1 * time.Second): } storage.DisableRedis(false) + select { case <-event: - case <-time.After(time.Second): + case <-time.After(3 * time.Second): t.Fatal("Expected redis to reconnect and call the callback") } time.Sleep(time.Second) diff --git a/gateway/server.go b/gateway/server.go index 8d7aeac9c01..b8d6ea5958f 100644 --- a/gateway/server.go +++ b/gateway/server.go @@ -2,6 +2,7 @@ package gateway import ( "context" + "crypto/tls" "fmt" "html/template" "io/ioutil" @@ -1006,6 +1007,32 @@ func initialiseSystem(ctx context.Context) error { } } + if globalConf.ProxySSLMaxVersion == 0 { + globalConf.ProxySSLMaxVersion = tls.VersionTLS12 + } + + if globalConf.ProxySSLMinVersion > globalConf.ProxySSLMaxVersion { + globalConf.ProxySSLMaxVersion = globalConf.ProxySSLMinVersion + } + + if globalConf.HttpServerOptions.MaxVersion == 0 { + globalConf.HttpServerOptions.MaxVersion = tls.VersionTLS12 + } + + if globalConf.HttpServerOptions.MinVersion > globalConf.HttpServerOptions.MaxVersion { + globalConf.HttpServerOptions.MaxVersion = globalConf.HttpServerOptions.MinVersion + } + + if globalConf.UseDBAppConfigs && globalConf.Policies.PolicySource != config.DefaultDashPolicySource { + globalConf.Policies.PolicySource = config.DefaultDashPolicySource + globalConf.Policies.PolicyConnectionString = globalConf.DBAppConfOptions.ConnectionString + if globalConf.Policies.PolicyRecordName == "" { + globalConf.Policies.PolicyRecordName = config.DefaultDashPolicyRecordName + } + } + + config.SetGlobal(globalConf) + getHostDetails() setupInstrumentation() diff --git a/go.mod b/go.mod index 113b9a3b695..c53c9ae1d77 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/TykTechnologies/tyk -go 1.12 +go 1.15 require ( github.com/Jeffail/gabs v1.4.0 diff --git a/rpc/rpc_client.go b/rpc/rpc_client.go index 98ac38c617a..8f1b9043a3f 100644 --- a/rpc/rpc_client.go +++ b/rpc/rpc_client.go @@ -136,6 +136,8 @@ const ( type Config struct { UseSSL bool `json:"use_ssl"` SSLInsecureSkipVerify bool `json:"ssl_insecure_skip_verify"` + SSLMinVersion uint16 `json:"ssl_min_version"` + SSLMaxVersion uint16 `json:"ssl_max_version"` ConnectionString string `json:"connection_string"` RPCKey string `json:"rpc_key"` APIKey string `json:"api_key"` @@ -230,6 +232,8 @@ func Connect(connConfig Config, suppressRegister bool, dispatcherFuncs map[strin if values.Config().UseSSL { clientCfg := &tls.Config{ InsecureSkipVerify: values.Config().SSLInsecureSkipVerify, + MinVersion: values.Config().SSLMinVersion, + MaxVersion: values.Config().SSLMaxVersion, } clientSingleton = gorpc.NewTLSClient(values.Config().ConnectionString, clientCfg) @@ -259,6 +263,8 @@ func Connect(connConfig Config, suppressRegister bool, dispatcherFuncs map[strin if useSSL { cfg := &tls.Config{ InsecureSkipVerify: values.Config().SSLInsecureSkipVerify, + MinVersion: values.Config().SSLMinVersion, + MaxVersion: values.Config().SSLMaxVersion, } conn, err = tls.DialWithDialer(dialer, "tcp", addr, cfg) diff --git a/storage/redis_cluster.go b/storage/redis_cluster.go index f55bcb9a6b2..823cfaa2a2b 100644 --- a/storage/redis_cluster.go +++ b/storage/redis_cluster.go @@ -43,7 +43,9 @@ func DisableRedis(ok bool) { disableRedis.Store(true) return } + disableRedis.Store(false) + 
WaitConnect(context.Background()) } func shouldConnect() bool { @@ -63,6 +65,21 @@ func Connected() bool { return false } +func WaitConnect(ctx context.Context) bool { + for { + select { + case <-ctx.Done(): + return false + default: + if Connected() { + return true + } + + time.Sleep(10 * time.Millisecond) + } + } +} + func singleton(cache bool) redis.UniversalClient { if cache { v := singleCachePool.Load() diff --git a/tcp/tcp_test.go b/tcp/tcp_test.go index 7be75f5b1e0..715f5854326 100644 --- a/tcp/tcp_test.go +++ b/tcp/tcp_test.go @@ -201,6 +201,7 @@ func testRunner(t *testing.T, proxy *Proxy, hostname string, useSSL bool, testCa tlsConfig := &tls.Config{ Certificates: []tls.Certificate{test.Cert("localhost")}, InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, } tlsConfig.BuildNameToCertificate() proxyLn, err = tls.Listen("tcp", ":0", tlsConfig) diff --git a/test/tcp.go b/test/tcp.go index 3aa501c6155..3b5981e4933 100644 --- a/test/tcp.go +++ b/test/tcp.go @@ -39,6 +39,7 @@ func (r TCPTestRunner) Run(t testing.TB, testCases ...TCPTestCase) error { r.TLSClientConfig = &tls.Config{ ServerName: r.Hostname, InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, } } client, err = tls.Dial("tcp", r.Target, r.TLSClientConfig) @@ -97,6 +98,7 @@ func TcpMock(useSSL bool, cb func(in []byte, err error) (out []byte)) net.Listen tlsConfig := &tls.Config{ Certificates: []tls.Certificate{Cert("localhost")}, InsecureSkipVerify: true, + MaxVersion: tls.VersionTLS12, } tlsConfig.BuildNameToCertificate() l, _ = tls.Listen("tcp", ":0", tlsConfig) diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index 0cd3800377d..00000000000 --- a/vendor/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -TAGS -tags -.*.swp -tomlcheck/tomlcheck -toml.test diff --git a/vendor/github.com/BurntSushi/toml/.travis.yml b/vendor/github.com/BurntSushi/toml/.travis.yml deleted file mode 100644 index 8b8afc4f0e0..00000000000 --- a/vendor/github.com/BurntSushi/toml/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: go -go: - - 1.1 - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - tip -install: - - go install ./... - - go get github.com/BurntSushi/toml-test -script: - - export PATH="$PATH:$HOME/gopath/bin" - - make test diff --git a/vendor/github.com/BurntSushi/toml/COMPATIBLE b/vendor/github.com/BurntSushi/toml/COMPATIBLE deleted file mode 100644 index 6efcfd0ce55..00000000000 --- a/vendor/github.com/BurntSushi/toml/COMPATIBLE +++ /dev/null @@ -1,3 +0,0 @@ -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md) - diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING deleted file mode 100644 index 01b5743200b..00000000000 --- a/vendor/github.com/BurntSushi/toml/COPYING +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 TOML authors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/BurntSushi/toml/Makefile b/vendor/github.com/BurntSushi/toml/Makefile deleted file mode 100644 index 3600848d331..00000000000 --- a/vendor/github.com/BurntSushi/toml/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -install: - go install ./... - -test: install - go test -v - toml-test toml-test-decoder - toml-test -encoder toml-test-encoder - -fmt: - gofmt -w *.go */*.go - colcheck *.go */*.go - -tags: - find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS - -push: - git push origin master - git push github master - diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md deleted file mode 100644 index 7c1b37ecc7a..00000000000 --- a/vendor/github.com/BurntSushi/toml/README.md +++ /dev/null @@ -1,218 +0,0 @@ -## TOML parser and encoder for Go with reflection - -TOML stands for Tom's Obvious, Minimal Language. This Go package provides a -reflection interface similar to Go's standard library `json` and `xml` -packages. This package also supports the `encoding.TextUnmarshaler` and -`encoding.TextMarshaler` interfaces so that you can define custom data -representations. (There is an example of this below.) - -Spec: https://github.com/toml-lang/toml - -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) - -Documentation: https://godoc.org/github.com/BurntSushi/toml - -Installation: - -```bash -go get github.com/BurntSushi/toml -``` - -Try the toml validator: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -[![Build Status](https://travis-ci.org/BurntSushi/toml.svg?branch=master)](https://travis-ci.org/BurntSushi/toml) [![GoDoc](https://godoc.org/github.com/BurntSushi/toml?status.svg)](https://godoc.org/github.com/BurntSushi/toml) - -### Testing - -This package passes all tests in -[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder -and the encoder. - -### Examples - -This package works similarly to how the Go standard library handles `XML` -and `JSON`. Namely, data is loaded into Go values via reflection. 
- -For the simplest example, consider some TOML file as just a list of keys -and values: - -```toml -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z -``` - -Which could be defined in Go as: - -```go -type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time // requires `import time` -} -``` - -And then decoded with: - -```go -var conf Config -if _, err := toml.Decode(tomlData, &conf); err != nil { - // handle error -} -``` - -You can also use struct tags if your struct field name doesn't map to a TOML -key value directly: - -```toml -some_key_NAME = "wat" -``` - -```go -type TOML struct { - ObscureKey string `toml:"some_key_NAME"` -} -``` - -### Using the `encoding.TextUnmarshaler` interface - -Here's an example that automatically parses duration strings into -`time.Duration` values: - -```toml -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -``` - -Which can be decoded with: - -```go -type song struct { - Name string - Duration duration -} -type songs struct { - Song []song -} -var favorites songs -if _, err := toml.Decode(blob, &favorites); err != nil { - log.Fatal(err) -} - -for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) -} -``` - -And you'll also need a `duration` type that satisfies the -`encoding.TextUnmarshaler` interface: - -```go -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} -``` - -### More complex usage - -Here's an example of how to load the example from the official spec page: - -```toml -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] -``` - -And the corresponding Go types are: - -```go -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} -``` - -Note that a case insensitive match will be tried if an exact match can't be -found. - -A working example of the above can be found in `_examples/example.{go,toml}`. 
diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go deleted file mode 100644 index b0fd51d5b6e..00000000000 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ /dev/null @@ -1,509 +0,0 @@ -package toml - -import ( - "fmt" - "io" - "io/ioutil" - "math" - "reflect" - "strings" - "time" -) - -func e(format string, args ...interface{}) error { - return fmt.Errorf("toml: "+format, args...) -} - -// Unmarshaler is the interface implemented by objects that can unmarshal a -// TOML description of themselves. -type Unmarshaler interface { - UnmarshalTOML(interface{}) error -} - -// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. -func Unmarshal(p []byte, v interface{}) error { - _, err := Decode(string(p), v) - return err -} - -// Primitive is a TOML value that hasn't been decoded into a Go value. -// When using the various `Decode*` functions, the type `Primitive` may -// be given to any value, and its decoding will be delayed. -// -// A `Primitive` value can be decoded using the `PrimitiveDecode` function. -// -// The underlying representation of a `Primitive` value is subject to change. -// Do not rely on it. -// -// N.B. Primitive values are still parsed, so using them will only avoid -// the overhead of reflection. They can be useful when you don't know the -// exact type of TOML data until run time. -type Primitive struct { - undecoded interface{} - context Key -} - -// DEPRECATED! -// -// Use MetaData.PrimitiveDecode instead. -func PrimitiveDecode(primValue Primitive, v interface{}) error { - md := MetaData{decoded: make(map[string]bool)} - return md.unify(primValue.undecoded, rvalue(v)) -} - -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including this method. (i.e., `v` may contain more `Primitive` -// values.) -// -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions with one exception: keys returned by the Undecoded -// method will only reflect keys that were decoded. Namely, any keys hidden -// behind a Primitive will be considered undecoded. Executing this method will -// update the undecoded keys in the meta data. (See the example.) -func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { - md.context = primValue.context - defer func() { md.context = nil }() - return md.unify(primValue.undecoded, rvalue(v)) -} - -// Decode will decode the contents of `data` in TOML format into a pointer -// `v`. -// -// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be -// used interchangeably.) -// -// TOML arrays of tables correspond to either a slice of structs or a slice -// of maps. -// -// TOML datetimes correspond to Go `time.Time` values. -// -// All other TOML types (float, string, int, bool and array) correspond -// to the obvious Go types. -// -// An exception to the above rules is if a type implements the -// encoding.TextUnmarshaler interface. In this case, any primitive TOML value -// (floats, strings, integers, booleans and datetimes) will be converted to -// a byte string and given to the value's UnmarshalText method. See the -// Unmarshaler example for a demonstration with time duration strings. -// -// Key mapping -// -// TOML keys can map to either keys in a Go map or field names in a Go -// struct. 
The special `toml` struct tag may be used to map TOML keys to -// struct fields that don't match the key name exactly. (See the example.) -// A case insensitive match to struct names will be tried if an exact match -// can't be found. -// -// The mapping between TOML values and Go values is loose. That is, there -// may exist TOML values that cannot be placed into your representation, and -// there may be parts of your representation that do not correspond to -// TOML values. This loose mapping can be made stricter by using the IsDefined -// and/or Undecoded methods on the MetaData returned. -// -// This decoder will not handle cyclic types. If a cyclic type is passed, -// `Decode` will not terminate. -func Decode(data string, v interface{}) (MetaData, error) { - rv := reflect.ValueOf(v) - if rv.Kind() != reflect.Ptr { - return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v)) - } - if rv.IsNil() { - return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v)) - } - p, err := parse(data) - if err != nil { - return MetaData{}, err - } - md := MetaData{ - p.mapping, p.types, p.ordered, - make(map[string]bool, len(p.ordered)), nil, - } - return md, md.unify(p.mapping, indirect(rv)) -} - -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at `fpath` and decode it for you. -func DecodeFile(fpath string, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadFile(fpath) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// DecodeReader is just like Decode, except it will consume all bytes -// from the reader and decode it for you. -func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadAll(r) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// unify performs a sort of type unification based on the structure of `rv`, -// which is the client representation. -// -// Any type mismatch produces an error. Finding a type that we don't know -// how to handle produces an unsupported type error. -func (md *MetaData) unify(data interface{}, rv reflect.Value) error { - - // Special case. Look for a `Primitive` value. - if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { - // Save the undecoded data and the key context into the primitive - // value. - context := make(Key, len(md.context)) - copy(context, md.context) - rv.Set(reflect.ValueOf(Primitive{ - undecoded: data, - context: context, - })) - return nil - } - - // Special case. Unmarshaler Interface support. - if rv.CanAddr() { - if v, ok := rv.Addr().Interface().(Unmarshaler); ok { - return v.UnmarshalTOML(data) - } - } - - // Special case. Handle time.Time values specifically. - // TODO: Remove this code when we decide to drop support for Go 1.1. - // This isn't necessary in Go 1.2 because time.Time satisfies the encoding - // interfaces. - if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) { - return md.unifyDatetime(data, rv) - } - - // Special case. Look for a value satisfying the TextUnmarshaler interface. - if v, ok := rv.Interface().(TextUnmarshaler); ok { - return md.unifyText(data, v) - } - // BUG(burntsushi) - // The behavior here is incorrect whenever a Go type satisfies the - // encoding.TextUnmarshaler interface but also corresponds to a TOML - // hash or array. In particular, the unmarshaler should only be applied - // to primitive TOML values. 
But at this point, it will be applied to - // all kinds of values and produce an incorrect error whenever those values - // are hashes or arrays (including arrays of tables). - - k := rv.Kind() - - // laziness - if k >= reflect.Int && k <= reflect.Uint64 { - return md.unifyInt(data, rv) - } - switch k { - case reflect.Ptr: - elem := reflect.New(rv.Type().Elem()) - err := md.unify(data, reflect.Indirect(elem)) - if err != nil { - return err - } - rv.Set(elem) - return nil - case reflect.Struct: - return md.unifyStruct(data, rv) - case reflect.Map: - return md.unifyMap(data, rv) - case reflect.Array: - return md.unifyArray(data, rv) - case reflect.Slice: - return md.unifySlice(data, rv) - case reflect.String: - return md.unifyString(data, rv) - case reflect.Bool: - return md.unifyBool(data, rv) - case reflect.Interface: - // we only support empty interfaces. - if rv.NumMethod() > 0 { - return e("unsupported type %s", rv.Type()) - } - return md.unifyAnything(data, rv) - case reflect.Float32: - fallthrough - case reflect.Float64: - return md.unifyFloat64(data, rv) - } - return e("unsupported type %s", rv.Kind()) -} - -func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if mapping == nil { - return nil - } - return e("type mismatch for %s: expected table but found %T", - rv.Type().String(), mapping) - } - - for key, datum := range tmap { - var f *field - fields := cachedTypeFields(rv.Type()) - for i := range fields { - ff := &fields[i] - if ff.name == key { - f = ff - break - } - if f == nil && strings.EqualFold(ff.name, key) { - f = ff - } - } - if f != nil { - subv := rv - for _, i := range f.index { - subv = indirect(subv.Field(i)) - } - if isUnifiable(subv) { - md.decoded[md.context.add(key).String()] = true - md.context = append(md.context, key) - if err := md.unify(datum, subv); err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - } else if f.name != "" { - // Bad user! No soup for you! 
- return e("cannot write unexported field %s.%s", - rv.Type().String(), f.name) - } - } - } - return nil -} - -func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if tmap == nil { - return nil - } - return badtype("map", mapping) - } - if rv.IsNil() { - rv.Set(reflect.MakeMap(rv.Type())) - } - for k, v := range tmap { - md.decoded[md.context.add(k).String()] = true - md.context = append(md.context, k) - - rvkey := indirect(reflect.New(rv.Type().Key())) - rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) - if err := md.unify(v, rvval); err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - - rvkey.SetString(k) - rv.SetMapIndex(rvkey, rvval) - } - return nil -} - -func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return badtype("slice", data) - } - sliceLen := datav.Len() - if sliceLen != rv.Len() { - return e("expected array length %d; got TOML array of length %d", - rv.Len(), sliceLen) - } - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return badtype("slice", data) - } - n := datav.Len() - if rv.IsNil() || rv.Cap() < n { - rv.Set(reflect.MakeSlice(rv.Type(), n, n)) - } - rv.SetLen(n) - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { - sliceLen := data.Len() - for i := 0; i < sliceLen; i++ { - v := data.Index(i).Interface() - sliceval := indirect(rv.Index(i)) - if err := md.unify(v, sliceval); err != nil { - return err - } - } - return nil -} - -func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { - if _, ok := data.(time.Time); ok { - rv.Set(reflect.ValueOf(data)) - return nil - } - return badtype("time.Time", data) -} - -func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { - if s, ok := data.(string); ok { - rv.SetString(s) - return nil - } - return badtype("string", data) -} - -func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { - if num, ok := data.(float64); ok { - switch rv.Kind() { - case reflect.Float32: - fallthrough - case reflect.Float64: - rv.SetFloat(num) - default: - panic("bug") - } - return nil - } - return badtype("float", data) -} - -func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { - if num, ok := data.(int64); ok { - if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 { - switch rv.Kind() { - case reflect.Int, reflect.Int64: - // No bounds checking necessary. - case reflect.Int8: - if num < math.MinInt8 || num > math.MaxInt8 { - return e("value %d is out of range for int8", num) - } - case reflect.Int16: - if num < math.MinInt16 || num > math.MaxInt16 { - return e("value %d is out of range for int16", num) - } - case reflect.Int32: - if num < math.MinInt32 || num > math.MaxInt32 { - return e("value %d is out of range for int32", num) - } - } - rv.SetInt(num) - } else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 { - unum := uint64(num) - switch rv.Kind() { - case reflect.Uint, reflect.Uint64: - // No bounds checking necessary. 
- case reflect.Uint8: - if num < 0 || unum > math.MaxUint8 { - return e("value %d is out of range for uint8", num) - } - case reflect.Uint16: - if num < 0 || unum > math.MaxUint16 { - return e("value %d is out of range for uint16", num) - } - case reflect.Uint32: - if num < 0 || unum > math.MaxUint32 { - return e("value %d is out of range for uint32", num) - } - } - rv.SetUint(unum) - } else { - panic("unreachable") - } - return nil - } - return badtype("integer", data) -} - -func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { - if b, ok := data.(bool); ok { - rv.SetBool(b) - return nil - } - return badtype("boolean", data) -} - -func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { - rv.Set(reflect.ValueOf(data)) - return nil -} - -func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error { - var s string - switch sdata := data.(type) { - case TextMarshaler: - text, err := sdata.MarshalText() - if err != nil { - return err - } - s = string(text) - case fmt.Stringer: - s = sdata.String() - case string: - s = sdata - case bool: - s = fmt.Sprintf("%v", sdata) - case int64: - s = fmt.Sprintf("%d", sdata) - case float64: - s = fmt.Sprintf("%f", sdata) - default: - return badtype("primitive (string-like)", data) - } - if err := v.UnmarshalText([]byte(s)); err != nil { - return err - } - return nil -} - -// rvalue returns a reflect.Value of `v`. All pointers are resolved. -func rvalue(v interface{}) reflect.Value { - return indirect(reflect.ValueOf(v)) -} - -// indirect returns the value pointed to by a pointer. -// Pointers are followed until the value is not a pointer. -// New values are allocated for each nil pointer. -// -// An exception to this rule is if the value satisfies an interface of -// interest to us (like encoding.TextUnmarshaler). -func indirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - if v.CanSet() { - pv := v.Addr() - if _, ok := pv.Interface().(TextUnmarshaler); ok { - return pv - } - } - return v - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - return indirect(reflect.Indirect(v)) -} - -func isUnifiable(rv reflect.Value) bool { - if rv.CanSet() { - return true - } - if _, ok := rv.Interface().(TextUnmarshaler); ok { - return true - } - return false -} - -func badtype(expected string, data interface{}) error { - return e("cannot load TOML value of type %T into a Go %s", data, expected) -} diff --git a/vendor/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/decode_meta.go deleted file mode 100644 index b9914a6798c..00000000000 --- a/vendor/github.com/BurntSushi/toml/decode_meta.go +++ /dev/null @@ -1,121 +0,0 @@ -package toml - -import "strings" - -// MetaData allows access to meta information about TOML data that may not -// be inferrable via reflection. In particular, whether a key has been defined -// and the TOML type of a key. -type MetaData struct { - mapping map[string]interface{} - types map[string]tomlType - keys []Key - decoded map[string]bool - context Key // Used only during decoding. -} - -// IsDefined returns true if the key given exists in the TOML data. The key -// should be specified hierarchially. e.g., -// -// // access the TOML key 'a.b.c' -// IsDefined("a", "b", "c") -// -// IsDefined will return false if an empty key given. Keys are case sensitive. 
-func (md *MetaData) IsDefined(key ...string) bool { - if len(key) == 0 { - return false - } - - var hash map[string]interface{} - var ok bool - var hashOrVal interface{} = md.mapping - for _, k := range key { - if hash, ok = hashOrVal.(map[string]interface{}); !ok { - return false - } - if hashOrVal, ok = hash[k]; !ok { - return false - } - } - return true -} - -// Type returns a string representation of the type of the key specified. -// -// Type will return the empty string if given an empty key or a key that -// does not exist. Keys are case sensitive. -func (md *MetaData) Type(key ...string) string { - fullkey := strings.Join(key, ".") - if typ, ok := md.types[fullkey]; ok { - return typ.typeString() - } - return "" -} - -// Key is the type of any TOML key, including key groups. Use (MetaData).Keys -// to get values of this type. -type Key []string - -func (k Key) String() string { - return strings.Join(k, ".") -} - -func (k Key) maybeQuotedAll() string { - var ss []string - for i := range k { - ss = append(ss, k.maybeQuoted(i)) - } - return strings.Join(ss, ".") -} - -func (k Key) maybeQuoted(i int) string { - quote := false - for _, c := range k[i] { - if !isBareKeyChar(c) { - quote = true - break - } - } - if quote { - return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\"" - } - return k[i] -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)+1) - copy(newKey, k) - newKey[len(k)] = piece - return newKey -} - -// Keys returns a slice of every key in the TOML data, including key groups. -// Each key is itself a slice, where the first element is the top of the -// hierarchy and the last is the most specific. -// -// The list will have the same order as the keys appeared in the TOML data. -// -// All keys returned are non-empty. -func (md *MetaData) Keys() []Key { - return md.keys -} - -// Undecoded returns all keys that have not been decoded in the order in which -// they appear in the original TOML document. -// -// This includes keys that haven't been decoded because of a Primitive value. -// Once the Primitive value is decoded, the keys will be considered decoded. -// -// Also note that decoding into an empty interface will result in no decoding, -// and so no keys will be considered decoded. -// -// In this sense, the Undecoded keys correspond to keys in the TOML document -// that do not have a concrete type in your representation. -func (md *MetaData) Undecoded() []Key { - undecoded := make([]Key, 0, len(md.keys)) - for _, key := range md.keys { - if !md.decoded[key.String()] { - undecoded = append(undecoded, key) - } - } - return undecoded -} diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go deleted file mode 100644 index b371f396edc..00000000000 --- a/vendor/github.com/BurntSushi/toml/doc.go +++ /dev/null @@ -1,27 +0,0 @@ -/* -Package toml provides facilities for decoding and encoding TOML configuration -files via reflection. There is also support for delaying decoding with -the Primitive type, and querying the set of keys in a TOML document with the -MetaData type. - -The specification implemented: https://github.com/toml-lang/toml - -The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify -whether a file is a valid TOML document. It can also be used to print the -type of each key in a TOML document. - -Testing - -There are two important types of tests used for this package. The first is -contained inside '*_test.go' files and uses the standard Go unit testing -framework. 
These tests are primarily devoted to holistically testing the -decoder and encoder. - -The second type of testing is used to verify the implementation's adherence -to the TOML specification. These tests have been factored into their own -project: https://github.com/BurntSushi/toml-test - -The reason the tests are in a separate project is so that they can be used by -any implementation of TOML. Namely, it is language agnostic. -*/ -package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go deleted file mode 100644 index d905c21a246..00000000000 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ /dev/null @@ -1,568 +0,0 @@ -package toml - -import ( - "bufio" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strconv" - "strings" - "time" -) - -type tomlEncodeError struct{ error } - -var ( - errArrayMixedElementTypes = errors.New( - "toml: cannot encode array with mixed element types") - errArrayNilElement = errors.New( - "toml: cannot encode array with nil element") - errNonString = errors.New( - "toml: cannot encode a map with non-string key type") - errAnonNonStruct = errors.New( - "toml: cannot encode an anonymous field that is not a struct") - errArrayNoTable = errors.New( - "toml: TOML array element cannot contain a table") - errNoKey = errors.New( - "toml: top-level values must be Go maps or structs") - errAnything = errors.New("") // used in testing -) - -var quotedReplacer = strings.NewReplacer( - "\t", "\\t", - "\n", "\\n", - "\r", "\\r", - "\"", "\\\"", - "\\", "\\\\", -) - -// Encoder controls the encoding of Go values to a TOML document to some -// io.Writer. -// -// The indentation level can be controlled with the Indent field. -type Encoder struct { - // A single indentation level. By default it is two spaces. - Indent string - - // hasWritten is whether we have written any output to w yet. - hasWritten bool - w *bufio.Writer -} - -// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer -// given. By default, a single indentation level is 2 spaces. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: bufio.NewWriter(w), - Indent: " ", - } -} - -// Encode writes a TOML representation of the Go value to the underlying -// io.Writer. If the value given cannot be encoded to a valid TOML document, -// then an error is returned. -// -// The mapping between Go values and TOML values should be precisely the same -// as for the Decode* functions. Similarly, the TextMarshaler interface is -// supported by encoding the resulting bytes as strings. (If you want to write -// arbitrary binary data then you will need to use something like base64 since -// TOML does not have any binary types.) -// -// When encoding TOML hashes (i.e., Go maps or structs), keys without any -// sub-hashes are encoded first. -// -// If a Go map is encoded, then its keys are sorted alphabetically for -// deterministic output. More control over this behavior may be provided if -// there is demand for it. -// -// Encoding Go values without a corresponding TOML representation---like map -// types with non-string keys---will cause an error to be returned. Similarly -// for mixed arrays/slices, arrays/slices with nil elements, embedded -// non-struct types and nested slices containing maps or structs. -// (e.g., [][]map[string]string is not allowed but []map[string]string is OK -// and so is []map[string][]string.) 
-func (enc *Encoder) Encode(v interface{}) error { - rv := eindirect(reflect.ValueOf(v)) - if err := enc.safeEncode(Key([]string{}), rv); err != nil { - return err - } - return enc.w.Flush() -} - -func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { - defer func() { - if r := recover(); r != nil { - if terr, ok := r.(tomlEncodeError); ok { - err = terr.error - return - } - panic(r) - } - }() - enc.encode(key, rv) - return nil -} - -func (enc *Encoder) encode(key Key, rv reflect.Value) { - // Special case. Time needs to be in ISO8601 format. - // Special case. If we can marshal the type to text, then we used that. - // Basically, this prevents the encoder for handling these types as - // generic structs (or whatever the underlying type of a TextMarshaler is). - switch rv.Interface().(type) { - case time.Time, TextMarshaler: - enc.keyEqElement(key, rv) - return - } - - k := rv.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: - enc.keyEqElement(key, rv) - case reflect.Array, reflect.Slice: - if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { - enc.eArrayOfTables(key, rv) - } else { - enc.keyEqElement(key, rv) - } - case reflect.Interface: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Map: - if rv.IsNil() { - return - } - enc.eTable(key, rv) - case reflect.Ptr: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Struct: - enc.eTable(key, rv) - default: - panic(e("unsupported type for key '%s': %s", key, k)) - } -} - -// eElement encodes any value that can be an array element (primitives and -// arrays). -func (enc *Encoder) eElement(rv reflect.Value) { - switch v := rv.Interface().(type) { - case time.Time: - // Special case time.Time as a primitive. Has to come before - // TextMarshaler below because time.Time implements - // encoding.TextMarshaler, but we need to always use UTC. - enc.wf(v.UTC().Format("2006-01-02T15:04:05Z")) - return - case TextMarshaler: - // Special case. Use text marshaler if it's available for this value. - if s, err := v.MarshalText(); err != nil { - encPanic(err) - } else { - enc.writeQuoted(string(s)) - } - return - } - switch rv.Kind() { - case reflect.Bool: - enc.wf(strconv.FormatBool(rv.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64: - enc.wf(strconv.FormatInt(rv.Int(), 10)) - case reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64: - enc.wf(strconv.FormatUint(rv.Uint(), 10)) - case reflect.Float32: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32))) - case reflect.Float64: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64))) - case reflect.Array, reflect.Slice: - enc.eArrayOrSliceElement(rv) - case reflect.Interface: - enc.eElement(rv.Elem()) - case reflect.String: - enc.writeQuoted(rv.String()) - default: - panic(e("unexpected primitive type: %s", rv.Kind())) - } -} - -// By the TOML spec, all floats must have a decimal with at least one -// number on either side. 
-func floatAddDecimal(fstr string) string { - if !strings.Contains(fstr, ".") { - return fstr + ".0" - } - return fstr -} - -func (enc *Encoder) writeQuoted(s string) { - enc.wf("\"%s\"", quotedReplacer.Replace(s)) -} - -func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { - length := rv.Len() - enc.wf("[") - for i := 0; i < length; i++ { - elem := rv.Index(i) - enc.eElement(elem) - if i != length-1 { - enc.wf(", ") - } - } - enc.wf("]") -} - -func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - for i := 0; i < rv.Len(); i++ { - trv := rv.Index(i) - if isNil(trv) { - continue - } - panicIfInvalidKey(key) - enc.newline() - enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll()) - enc.newline() - enc.eMapOrStruct(key, trv) - } -} - -func (enc *Encoder) eTable(key Key, rv reflect.Value) { - panicIfInvalidKey(key) - if len(key) == 1 { - // Output an extra newline between top-level tables. - // (The newline isn't written if nothing else has been written though.) - enc.newline() - } - if len(key) > 0 { - enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll()) - enc.newline() - } - enc.eMapOrStruct(key, rv) -} - -func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) { - switch rv := eindirect(rv); rv.Kind() { - case reflect.Map: - enc.eMap(key, rv) - case reflect.Struct: - enc.eStruct(key, rv) - default: - panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) - } -} - -func (enc *Encoder) eMap(key Key, rv reflect.Value) { - rt := rv.Type() - if rt.Key().Kind() != reflect.String { - encPanic(errNonString) - } - - // Sort keys so that we have deterministic output. And write keys directly - // underneath this key first, before writing sub-structs or sub-maps. - var mapKeysDirect, mapKeysSub []string - for _, mapKey := range rv.MapKeys() { - k := mapKey.String() - if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { - mapKeysSub = append(mapKeysSub, k) - } else { - mapKeysDirect = append(mapKeysDirect, k) - } - } - - var writeMapKeys = func(mapKeys []string) { - sort.Strings(mapKeys) - for _, mapKey := range mapKeys { - mrv := rv.MapIndex(reflect.ValueOf(mapKey)) - if isNil(mrv) { - // Don't write anything for nil fields. - continue - } - enc.encode(key.add(mapKey), mrv) - } - } - writeMapKeys(mapKeysDirect) - writeMapKeys(mapKeysSub) -} - -func (enc *Encoder) eStruct(key Key, rv reflect.Value) { - // Write keys for fields directly under this key first, because if we write - // a field that creates a new table, then all keys under it will be in that - // table (not the one we're writing here). - rt := rv.Type() - var fieldsDirect, fieldsSub [][]int - var addFields func(rt reflect.Type, rv reflect.Value, start []int) - addFields = func(rt reflect.Type, rv reflect.Value, start []int) { - for i := 0; i < rt.NumField(); i++ { - f := rt.Field(i) - // skip unexported fields - if f.PkgPath != "" && !f.Anonymous { - continue - } - frv := rv.Field(i) - if f.Anonymous { - t := f.Type - switch t.Kind() { - case reflect.Struct: - // Treat anonymous struct fields with - // tag names as though they are not - // anonymous, like encoding/json does. - if getOptions(f.Tag).name == "" { - addFields(t, frv, f.Index) - continue - } - case reflect.Ptr: - if t.Elem().Kind() == reflect.Struct && - getOptions(f.Tag).name == "" { - if !frv.IsNil() { - addFields(t.Elem(), frv.Elem(), f.Index) - } - continue - } - // Fall through to the normal field encoding logic below - // for non-struct anonymous fields. 
- } - } - - if typeIsHash(tomlTypeOfGo(frv)) { - fieldsSub = append(fieldsSub, append(start, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } - } - } - addFields(rt, rv, nil) - - var writeFields = func(fields [][]int) { - for _, fieldIndex := range fields { - sft := rt.FieldByIndex(fieldIndex) - sf := rv.FieldByIndex(fieldIndex) - if isNil(sf) { - // Don't write anything for nil fields. - continue - } - - opts := getOptions(sft.Tag) - if opts.skip { - continue - } - keyName := sft.Name - if opts.name != "" { - keyName = opts.name - } - if opts.omitempty && isEmpty(sf) { - continue - } - if opts.omitzero && isZero(sf) { - continue - } - - enc.encode(key.add(keyName), sf) - } - } - writeFields(fieldsDirect) - writeFields(fieldsSub) -} - -// tomlTypeName returns the TOML type name of the Go value's type. It is -// used to determine whether the types of array elements are mixed (which is -// forbidden). If the Go value is nil, then it is illegal for it to be an array -// element, and valueIsNil is returned as true. - -// Returns the TOML type of a Go value. The type may be `nil`, which means -// no concrete TOML type could be found. -func tomlTypeOfGo(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() { - return nil - } - switch rv.Kind() { - case reflect.Bool: - return tomlBool - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64: - return tomlInteger - case reflect.Float32, reflect.Float64: - return tomlFloat - case reflect.Array, reflect.Slice: - if typeEqual(tomlHash, tomlArrayType(rv)) { - return tomlArrayHash - } - return tomlArray - case reflect.Ptr, reflect.Interface: - return tomlTypeOfGo(rv.Elem()) - case reflect.String: - return tomlString - case reflect.Map: - return tomlHash - case reflect.Struct: - switch rv.Interface().(type) { - case time.Time: - return tomlDatetime - case TextMarshaler: - return tomlString - default: - return tomlHash - } - default: - panic("unexpected reflect.Kind: " + rv.Kind().String()) - } -} - -// tomlArrayType returns the element type of a TOML array. The type returned -// may be nil if it cannot be determined (e.g., a nil slice or a zero length -// slize). This function may also panic if it finds a type that cannot be -// expressed in TOML (such as nil elements, heterogeneous arrays or directly -// nested arrays of tables). -func tomlArrayType(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() || rv.Len() == 0 { - return nil - } - firstType := tomlTypeOfGo(rv.Index(0)) - if firstType == nil { - encPanic(errArrayNilElement) - } - - rvlen := rv.Len() - for i := 1; i < rvlen; i++ { - elem := rv.Index(i) - switch elemType := tomlTypeOfGo(elem); { - case elemType == nil: - encPanic(errArrayNilElement) - case !typeEqual(firstType, elemType): - encPanic(errArrayMixedElementTypes) - } - } - // If we have a nested array, then we must make sure that the nested - // array contains ONLY primitives. - // This checks arbitrarily nested arrays. 
- if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) { - nest := tomlArrayType(eindirect(rv.Index(0))) - if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) { - encPanic(errArrayNoTable) - } - } - return firstType -} - -type tagOptions struct { - skip bool // "-" - name string - omitempty bool - omitzero bool -} - -func getOptions(tag reflect.StructTag) tagOptions { - t := tag.Get("toml") - if t == "-" { - return tagOptions{skip: true} - } - var opts tagOptions - parts := strings.Split(t, ",") - opts.name = parts[0] - for _, s := range parts[1:] { - switch s { - case "omitempty": - opts.omitempty = true - case "omitzero": - opts.omitzero = true - } - } - return opts -} - -func isZero(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return rv.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return rv.Uint() == 0 - case reflect.Float32, reflect.Float64: - return rv.Float() == 0.0 - } - return false -} - -func isEmpty(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Array, reflect.Slice, reflect.Map, reflect.String: - return rv.Len() == 0 - case reflect.Bool: - return !rv.Bool() - } - return false -} - -func (enc *Encoder) newline() { - if enc.hasWritten { - enc.wf("\n") - } -} - -func (enc *Encoder) keyEqElement(key Key, val reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - panicIfInvalidKey(key) - enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) - enc.eElement(val) - enc.newline() -} - -func (enc *Encoder) wf(format string, v ...interface{}) { - if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { - encPanic(err) - } - enc.hasWritten = true -} - -func (enc *Encoder) indentStr(key Key) string { - return strings.Repeat(enc.Indent, len(key)-1) -} - -func encPanic(err error) { - panic(tomlEncodeError{err}) -} - -func eindirect(v reflect.Value) reflect.Value { - switch v.Kind() { - case reflect.Ptr, reflect.Interface: - return eindirect(v.Elem()) - default: - return v - } -} - -func isNil(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return rv.IsNil() - default: - return false - } -} - -func panicIfInvalidKey(key Key) { - for _, k := range key { - if len(k) == 0 { - encPanic(e("Key '%s' is not a valid table name. Key names "+ - "cannot be empty.", key.maybeQuotedAll())) - } - } -} - -func isValidKeyName(s string) bool { - return len(s) != 0 -} diff --git a/vendor/github.com/BurntSushi/toml/encoding_types.go b/vendor/github.com/BurntSushi/toml/encoding_types.go deleted file mode 100644 index d36e1dd6002..00000000000 --- a/vendor/github.com/BurntSushi/toml/encoding_types.go +++ /dev/null @@ -1,19 +0,0 @@ -// +build go1.2 - -package toml - -// In order to support Go 1.1, we define our own TextMarshaler and -// TextUnmarshaler types. For Go 1.2+, we just alias them with the -// standard library interfaces. - -import ( - "encoding" -) - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler encoding.TextMarshaler - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined -// here so that Go 1.1 can be supported. 
-type TextUnmarshaler encoding.TextUnmarshaler diff --git a/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go b/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go deleted file mode 100644 index e8d503d0469..00000000000 --- a/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go +++ /dev/null @@ -1,18 +0,0 @@ -// +build !go1.2 - -package toml - -// These interfaces were introduced in Go 1.2, so we add them manually when -// compiling for Go 1.1. - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler interface { - MarshalText() (text []byte, err error) -} - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined -// here so that Go 1.1 can be supported. -type TextUnmarshaler interface { - UnmarshalText(text []byte) error -} diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go deleted file mode 100644 index e0a742a8870..00000000000 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ /dev/null @@ -1,953 +0,0 @@ -package toml - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf8" -) - -type itemType int - -const ( - itemError itemType = iota - itemNIL // used in the parser to indicate no type - itemEOF - itemText - itemString - itemRawString - itemMultilineString - itemRawMultilineString - itemBool - itemInteger - itemFloat - itemDatetime - itemArray // the start of an array - itemArrayEnd - itemTableStart - itemTableEnd - itemArrayTableStart - itemArrayTableEnd - itemKeyStart - itemCommentStart - itemInlineTableStart - itemInlineTableEnd -) - -const ( - eof = 0 - comma = ',' - tableStart = '[' - tableEnd = ']' - arrayTableStart = '[' - arrayTableEnd = ']' - tableSep = '.' - keySep = '=' - arrayStart = '[' - arrayEnd = ']' - commentStart = '#' - stringStart = '"' - stringEnd = '"' - rawStringStart = '\'' - rawStringEnd = '\'' - inlineTableStart = '{' - inlineTableEnd = '}' -) - -type stateFn func(lx *lexer) stateFn - -type lexer struct { - input string - start int - pos int - line int - state stateFn - items chan item - - // Allow for backing up up to three runes. - // This is necessary because TOML contains 3-rune tokens (""" and '''). - prevWidths [3]int - nprev int // how many of prevWidths are in use - // If we emit an eof, we can still back up, but it is not OK to call - // next again. - atEOF bool - - // A stack of state functions used to maintain context. - // The idea is to reuse parts of the state machine in various places. - // For example, values can appear at the top level or within arbitrarily - // nested arrays. The last state on the stack is used after a value has - // been lexed. Similarly for comments. 
- stack []stateFn -} - -type item struct { - typ itemType - val string - line int -} - -func (lx *lexer) nextItem() item { - for { - select { - case item := <-lx.items: - return item - default: - lx.state = lx.state(lx) - } - } -} - -func lex(input string) *lexer { - lx := &lexer{ - input: input, - state: lexTop, - line: 1, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - } - return lx -} - -func (lx *lexer) push(state stateFn) { - lx.stack = append(lx.stack, state) -} - -func (lx *lexer) pop() stateFn { - if len(lx.stack) == 0 { - return lx.errorf("BUG in lexer: no states to pop") - } - last := lx.stack[len(lx.stack)-1] - lx.stack = lx.stack[0 : len(lx.stack)-1] - return last -} - -func (lx *lexer) current() string { - return lx.input[lx.start:lx.pos] -} - -func (lx *lexer) emit(typ itemType) { - lx.items <- item{typ, lx.current(), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) emitTrim(typ itemType) { - lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) next() (r rune) { - if lx.atEOF { - panic("next called after EOF") - } - if lx.pos >= len(lx.input) { - lx.atEOF = true - return eof - } - - if lx.input[lx.pos] == '\n' { - lx.line++ - } - lx.prevWidths[2] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[0] - if lx.nprev < 3 { - lx.nprev++ - } - r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) - lx.prevWidths[0] = w - lx.pos += w - return r -} - -// ignore skips over the pending input before this point. -func (lx *lexer) ignore() { - lx.start = lx.pos -} - -// backup steps back one rune. Can be called only twice between calls to next. -func (lx *lexer) backup() { - if lx.atEOF { - lx.atEOF = false - return - } - if lx.nprev < 1 { - panic("backed up too far") - } - w := lx.prevWidths[0] - lx.prevWidths[0] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[2] - lx.nprev-- - lx.pos -= w - if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { - lx.line-- - } -} - -// accept consumes the next rune if it's equal to `valid`. -func (lx *lexer) accept(valid rune) bool { - if lx.next() == valid { - return true - } - lx.backup() - return false -} - -// peek returns but does not consume the next rune in the input. -func (lx *lexer) peek() rune { - r := lx.next() - lx.backup() - return r -} - -// skip ignores all input that matches the given predicate. -func (lx *lexer) skip(pred func(rune) bool) { - for { - r := lx.next() - if pred(r) { - continue - } - lx.backup() - lx.ignore() - return - } -} - -// errorf stops all lexing by emitting an error and returning `nil`. -// Note that any value that is a character is escaped if it's a special -// character (newlines, tabs, etc.). -func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - lx.items <- item{ - itemError, - fmt.Sprintf(format, values...), - lx.line, - } - return nil -} - -// lexTop consumes elements at the top level of TOML data. -func lexTop(lx *lexer) stateFn { - r := lx.next() - if isWhitespace(r) || isNL(r) { - return lexSkip(lx, lexTop) - } - switch r { - case commentStart: - lx.push(lexTop) - return lexCommentStart - case tableStart: - return lexTableStart - case eof: - if lx.pos > lx.start { - return lx.errorf("unexpected EOF") - } - lx.emit(itemEOF) - return nil - } - - // At this point, the only valid item can be a key, so we back up - // and let the key lexer do the rest. - lx.backup() - lx.push(lexTopEnd) - return lexKeyStart -} - -// lexTopEnd is entered whenever a top-level item has been consumed. (A value -// or a table.) 
It must see only whitespace, and will turn back to lexTop -// upon a newline. If it sees EOF, it will quit the lexer successfully. -func lexTopEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case r == commentStart: - // a comment will read to a newline for us. - lx.push(lexTop) - return lexCommentStart - case isWhitespace(r): - return lexTopEnd - case isNL(r): - lx.ignore() - return lexTop - case r == eof: - lx.emit(itemEOF) - return nil - } - return lx.errorf("expected a top-level item to end with a newline, "+ - "comment, or EOF, but got %q instead", r) -} - -// lexTable lexes the beginning of a table. Namely, it makes sure that -// it starts with a character other than '.' and ']'. -// It assumes that '[' has already been consumed. -// It also handles the case that this is an item in an array of tables. -// e.g., '[[name]]'. -func lexTableStart(lx *lexer) stateFn { - if lx.peek() == arrayTableStart { - lx.next() - lx.emit(itemArrayTableStart) - lx.push(lexArrayTableEnd) - } else { - lx.emit(itemTableStart) - lx.push(lexTableEnd) - } - return lexTableNameStart -} - -func lexTableEnd(lx *lexer) stateFn { - lx.emit(itemTableEnd) - return lexTopEnd -} - -func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != arrayTableEnd { - return lx.errorf("expected end of table array name delimiter %q, "+ - "but got %q instead", arrayTableEnd, r) - } - lx.emit(itemArrayTableEnd) - return lexTopEnd -} - -func lexTableNameStart(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.peek(); { - case r == tableEnd || r == eof: - return lx.errorf("unexpected end of table name " + - "(table names cannot be empty)") - case r == tableSep: - return lx.errorf("unexpected table separator " + - "(table names cannot be empty)") - case r == stringStart || r == rawStringStart: - lx.ignore() - lx.push(lexTableNameEnd) - return lexValue // reuse string lexing - default: - return lexBareTableName - } -} - -// lexBareTableName lexes the name of a table. It assumes that at least one -// valid character for the table has already been read. -func lexBareTableName(lx *lexer) stateFn { - r := lx.next() - if isBareKeyChar(r) { - return lexBareTableName - } - lx.backup() - lx.emit(itemText) - return lexTableNameEnd -} - -// lexTableNameEnd reads the end of a piece of a table name, optionally -// consuming whitespace. -func lexTableNameEnd(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.next(); { - case isWhitespace(r): - return lexTableNameEnd - case r == tableSep: - lx.ignore() - return lexTableNameStart - case r == tableEnd: - return lx.pop() - default: - return lx.errorf("expected '.' or ']' to end table name, "+ - "but got %q instead", r) - } -} - -// lexKeyStart consumes a key name up until the first non-whitespace character. -// lexKeyStart will ignore whitespace. -func lexKeyStart(lx *lexer) stateFn { - r := lx.peek() - switch { - case r == keySep: - return lx.errorf("unexpected key separator %q", keySep) - case isWhitespace(r) || isNL(r): - lx.next() - return lexSkip(lx, lexKeyStart) - case r == stringStart || r == rawStringStart: - lx.ignore() - lx.emit(itemKeyStart) - lx.push(lexKeyEnd) - return lexValue // reuse string lexing - default: - lx.ignore() - lx.emit(itemKeyStart) - return lexBareKey - } -} - -// lexBareKey consumes the text of a bare key. Assumes that the first character -// (which is not whitespace) has not yet been consumed. 
-func lexBareKey(lx *lexer) stateFn { - switch r := lx.next(); { - case isBareKeyChar(r): - return lexBareKey - case isWhitespace(r): - lx.backup() - lx.emit(itemText) - return lexKeyEnd - case r == keySep: - lx.backup() - lx.emit(itemText) - return lexKeyEnd - default: - return lx.errorf("bare keys cannot contain %q", r) - } -} - -// lexKeyEnd consumes the end of a key and trims whitespace (up to the key -// separator). -func lexKeyEnd(lx *lexer) stateFn { - switch r := lx.next(); { - case r == keySep: - return lexSkip(lx, lexValue) - case isWhitespace(r): - return lexSkip(lx, lexKeyEnd) - default: - return lx.errorf("expected key separator %q, but got %q instead", - keySep, r) - } -} - -// lexValue starts the consumption of a value anywhere a value is expected. -// lexValue will ignore whitespace. -// After a value is lexed, the last state on the next is popped and returned. -func lexValue(lx *lexer) stateFn { - // We allow whitespace to precede a value, but NOT newlines. - // In array syntax, the array states are responsible for ignoring newlines. - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexValue) - case isDigit(r): - lx.backup() // avoid an extra state and use the same as above - return lexNumberOrDateStart - } - switch r { - case arrayStart: - lx.ignore() - lx.emit(itemArray) - return lexArrayValue - case inlineTableStart: - lx.ignore() - lx.emit(itemInlineTableStart) - return lexInlineTableValue - case stringStart: - if lx.accept(stringStart) { - if lx.accept(stringStart) { - lx.ignore() // Ignore """ - return lexMultilineString - } - lx.backup() - } - lx.ignore() // ignore the '"' - return lexString - case rawStringStart: - if lx.accept(rawStringStart) { - if lx.accept(rawStringStart) { - lx.ignore() // Ignore """ - return lexMultilineRawString - } - lx.backup() - } - lx.ignore() // ignore the "'" - return lexRawString - case '+', '-': - return lexNumberStart - case '.': // special error case, be kind to users - return lx.errorf("floats must start with a digit, not '.'") - } - if unicode.IsLetter(r) { - // Be permissive here; lexBool will give a nice error if the - // user wrote something like - // x = foo - // (i.e. not 'true' or 'false' but is something else word-like.) - lx.backup() - return lexBool - } - return lx.errorf("expected value but found %q instead", r) -} - -// lexArrayValue consumes one value in an array. It assumes that '[' or ',' -// have already been consumed. All whitespace and newlines are ignored. -func lexArrayValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValue) - case r == commentStart: - lx.push(lexArrayValue) - return lexCommentStart - case r == comma: - return lx.errorf("unexpected comma") - case r == arrayEnd: - // NOTE(caleb): The spec isn't clear about whether you can have - // a trailing comma or not, so we'll allow it. - return lexArrayEnd - } - - lx.backup() - lx.push(lexArrayValueEnd) - return lexValue -} - -// lexArrayValueEnd consumes everything between the end of an array value and -// the next value (or the end of the array): it ignores whitespace and newlines -// and expects either a ',' or a ']'. 
-func lexArrayValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValueEnd) - case r == commentStart: - lx.push(lexArrayValueEnd) - return lexCommentStart - case r == comma: - lx.ignore() - return lexArrayValue // move on to the next value - case r == arrayEnd: - return lexArrayEnd - } - return lx.errorf( - "expected a comma or array terminator %q, but got %q instead", - arrayEnd, r, - ) -} - -// lexArrayEnd finishes the lexing of an array. -// It assumes that a ']' has just been consumed. -func lexArrayEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemArrayEnd) - return lx.pop() -} - -// lexInlineTableValue consumes one key/value pair in an inline table. -// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. -func lexInlineTableValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValue) - case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: - lx.push(lexInlineTableValue) - return lexCommentStart - case r == comma: - return lx.errorf("unexpected comma") - case r == inlineTableEnd: - return lexInlineTableEnd - } - lx.backup() - lx.push(lexInlineTableValueEnd) - return lexKeyStart -} - -// lexInlineTableValueEnd consumes everything between the end of an inline table -// key/value pair and the next pair (or the end of the table): -// it ignores whitespace and expects either a ',' or a '}'. -func lexInlineTableValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValueEnd) - case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: - lx.push(lexInlineTableValueEnd) - return lexCommentStart - case r == comma: - lx.ignore() - return lexInlineTableValue - case r == inlineTableEnd: - return lexInlineTableEnd - } - return lx.errorf("expected a comma or an inline table terminator %q, "+ - "but got %q instead", inlineTableEnd, r) -} - -// lexInlineTableEnd finishes the lexing of an inline table. -// It assumes that a '}' has just been consumed. -func lexInlineTableEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemInlineTableEnd) - return lx.pop() -} - -// lexString consumes the inner contents of a string. It assumes that the -// beginning '"' has already been consumed and ignored. -func lexString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == eof: - return lx.errorf("unexpected EOF") - case isNL(r): - return lx.errorf("strings cannot contain newlines") - case r == '\\': - lx.push(lexString) - return lexStringEscape - case r == stringEnd: - lx.backup() - lx.emit(itemString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexString -} - -// lexMultilineString consumes the inner contents of a string. It assumes that -// the beginning '"""' has already been consumed and ignored. -func lexMultilineString(lx *lexer) stateFn { - switch lx.next() { - case eof: - return lx.errorf("unexpected EOF") - case '\\': - return lexMultilineStringEscape - case stringEnd: - if lx.accept(stringEnd) { - if lx.accept(stringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - } - return lexMultilineString -} - -// lexRawString consumes a raw string. Nothing can be escaped in such a string. -// It assumes that the beginning "'" has already been consumed and ignored. 
-func lexRawString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == eof: - return lx.errorf("unexpected EOF") - case isNL(r): - return lx.errorf("strings cannot contain newlines") - case r == rawStringEnd: - lx.backup() - lx.emit(itemRawString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexRawString -} - -// lexMultilineRawString consumes a raw string. Nothing can be escaped in such -// a string. It assumes that the beginning "'''" has already been consumed and -// ignored. -func lexMultilineRawString(lx *lexer) stateFn { - switch lx.next() { - case eof: - return lx.errorf("unexpected EOF") - case rawStringEnd: - if lx.accept(rawStringEnd) { - if lx.accept(rawStringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemRawMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - } - return lexMultilineRawString -} - -// lexMultilineStringEscape consumes an escaped character. It assumes that the -// preceding '\\' has already been consumed. -func lexMultilineStringEscape(lx *lexer) stateFn { - // Handle the special case first: - if isNL(lx.next()) { - return lexMultilineString - } - lx.backup() - lx.push(lexMultilineString) - return lexStringEscape(lx) -} - -func lexStringEscape(lx *lexer) stateFn { - r := lx.next() - switch r { - case 'b': - fallthrough - case 't': - fallthrough - case 'n': - fallthrough - case 'f': - fallthrough - case 'r': - fallthrough - case '"': - fallthrough - case '\\': - return lx.pop() - case 'u': - return lexShortUnicodeEscape - case 'U': - return lexLongUnicodeEscape - } - return lx.errorf("invalid escape character %q; only the following "+ - "escape characters are allowed: "+ - `\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r) -} - -func lexShortUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 4; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf(`expected four hexadecimal digits after '\u', `+ - "but got %q instead", lx.current()) - } - } - return lx.pop() -} - -func lexLongUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 8; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf(`expected eight hexadecimal digits after '\U', `+ - "but got %q instead", lx.current()) - } - } - return lx.pop() -} - -// lexNumberOrDateStart consumes either an integer, a float, or datetime. -func lexNumberOrDateStart(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '_': - return lexNumber - case 'e', 'E': - return lexFloat - case '.': - return lx.errorf("floats must start with a digit, not '.'") - } - return lx.errorf("expected a digit but got %q", r) -} - -// lexNumberOrDate consumes either an integer, float or datetime. -func lexNumberOrDate(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '-': - return lexDatetime - case '_': - return lexNumber - case '.', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDatetime consumes a Datetime, to a first approximation. -// The parser validates that it matches one of the accepted formats. -func lexDatetime(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexDatetime - } - switch r { - case '-', 'T', ':', '.', 'Z', '+': - return lexDatetime - } - - lx.backup() - lx.emit(itemDatetime) - return lx.pop() -} - -// lexNumberStart consumes either an integer or a float. 
It assumes that a sign -// has already been read, but that *no* digits have been consumed. -// lexNumberStart will move to the appropriate integer or float states. -func lexNumberStart(lx *lexer) stateFn { - // We MUST see a digit. Even floats have to start with a digit. - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("floats must start with a digit, not '.'") - } - return lx.errorf("expected a digit but got %q", r) - } - return lexNumber -} - -// lexNumber consumes an integer or a float after seeing the first digit. -func lexNumber(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumber - } - switch r { - case '_': - return lexNumber - case '.', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexFloat consumes the elements of a float. It allows any sequence of -// float-like characters, so floats emitted by the lexer are only a first -// approximation and must be validated by the parser. -func lexFloat(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexFloat - } - switch r { - case '_', '.', '-', '+', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemFloat) - return lx.pop() -} - -// lexBool consumes a bool string: 'true' or 'false. -func lexBool(lx *lexer) stateFn { - var rs []rune - for { - r := lx.next() - if !unicode.IsLetter(r) { - lx.backup() - break - } - rs = append(rs, r) - } - s := string(rs) - switch s { - case "true", "false": - lx.emit(itemBool) - return lx.pop() - } - return lx.errorf("expected value but found %q instead", s) -} - -// lexCommentStart begins the lexing of a comment. It will emit -// itemCommentStart and consume no characters, passing control to lexComment. -func lexCommentStart(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemCommentStart) - return lexComment -} - -// lexComment lexes an entire comment. It assumes that '#' has been consumed. -// It will consume *up to* the first newline character, and pass control -// back to the last state on the stack. -func lexComment(lx *lexer) stateFn { - r := lx.peek() - if isNL(r) || r == eof { - lx.emit(itemText) - return lx.pop() - } - lx.next() - return lexComment -} - -// lexSkip ignores all slurped input and moves on to the next state. -func lexSkip(lx *lexer, nextState stateFn) stateFn { - return func(lx *lexer) stateFn { - lx.ignore() - return nextState - } -} - -// isWhitespace returns true if `r` is a whitespace character according -// to the spec. 
-func isWhitespace(r rune) bool { - return r == '\t' || r == ' ' -} - -func isNL(r rune) bool { - return r == '\n' || r == '\r' -} - -func isDigit(r rune) bool { - return r >= '0' && r <= '9' -} - -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} - -func isBareKeyChar(r rune) bool { - return (r >= 'A' && r <= 'Z') || - (r >= 'a' && r <= 'z') || - (r >= '0' && r <= '9') || - r == '_' || - r == '-' -} - -func (itype itemType) String() string { - switch itype { - case itemError: - return "Error" - case itemNIL: - return "NIL" - case itemEOF: - return "EOF" - case itemText: - return "Text" - case itemString, itemRawString, itemMultilineString, itemRawMultilineString: - return "String" - case itemBool: - return "Bool" - case itemInteger: - return "Integer" - case itemFloat: - return "Float" - case itemDatetime: - return "DateTime" - case itemTableStart: - return "TableStart" - case itemTableEnd: - return "TableEnd" - case itemKeyStart: - return "KeyStart" - case itemArray: - return "Array" - case itemArrayEnd: - return "ArrayEnd" - case itemCommentStart: - return "CommentStart" - } - panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) -} - -func (item item) String() string { - return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) -} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go deleted file mode 100644 index 50869ef9266..00000000000 --- a/vendor/github.com/BurntSushi/toml/parse.go +++ /dev/null @@ -1,592 +0,0 @@ -package toml - -import ( - "fmt" - "strconv" - "strings" - "time" - "unicode" - "unicode/utf8" -) - -type parser struct { - mapping map[string]interface{} - types map[string]tomlType - lx *lexer - - // A list of keys in the order that they appear in the TOML data. - ordered []Key - - // the full key for the current hash in scope - context Key - - // the base key name for everything except hashes - currentKey string - - // rough approximation of line number - approxLine int - - // A map of 'key.group.names' to whether they were created implicitly. 
- implicits map[string]bool -} - -type parseError string - -func (pe parseError) Error() string { - return string(pe) -} - -func parse(data string) (p *parser, err error) { - defer func() { - if r := recover(); r != nil { - var ok bool - if err, ok = r.(parseError); ok { - return - } - panic(r) - } - }() - - p = &parser{ - mapping: make(map[string]interface{}), - types: make(map[string]tomlType), - lx: lex(data), - ordered: make([]Key, 0), - implicits: make(map[string]bool), - } - for { - item := p.next() - if item.typ == itemEOF { - break - } - p.topLevel(item) - } - - return p, nil -} - -func (p *parser) panicf(format string, v ...interface{}) { - msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s", - p.approxLine, p.current(), fmt.Sprintf(format, v...)) - panic(parseError(msg)) -} - -func (p *parser) next() item { - it := p.lx.nextItem() - if it.typ == itemError { - p.panicf("%s", it.val) - } - return it -} - -func (p *parser) bug(format string, v ...interface{}) { - panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) -} - -func (p *parser) expect(typ itemType) item { - it := p.next() - p.assertEqual(typ, it.typ) - return it -} - -func (p *parser) assertEqual(expected, got itemType) { - if expected != got { - p.bug("Expected '%s' but got '%s'.", expected, got) - } -} - -func (p *parser) topLevel(item item) { - switch item.typ { - case itemCommentStart: - p.approxLine = item.line - p.expect(itemText) - case itemTableStart: - kg := p.next() - p.approxLine = kg.line - - var key Key - for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() { - key = append(key, p.keyString(kg)) - } - p.assertEqual(itemTableEnd, kg.typ) - - p.establishContext(key, false) - p.setType("", tomlHash) - p.ordered = append(p.ordered, key) - case itemArrayTableStart: - kg := p.next() - p.approxLine = kg.line - - var key Key - for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() { - key = append(key, p.keyString(kg)) - } - p.assertEqual(itemArrayTableEnd, kg.typ) - - p.establishContext(key, true) - p.setType("", tomlArrayHash) - p.ordered = append(p.ordered, key) - case itemKeyStart: - kname := p.next() - p.approxLine = kname.line - p.currentKey = p.keyString(kname) - - val, typ := p.value(p.next()) - p.setValue(p.currentKey, val) - p.setType(p.currentKey, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - p.currentKey = "" - default: - p.bug("Unexpected type at top level: %s", item.typ) - } -} - -// Gets a string for a key (or part of a key in a table name). -func (p *parser) keyString(it item) string { - switch it.typ { - case itemText: - return it.val - case itemString, itemMultilineString, - itemRawString, itemRawMultilineString: - s, _ := p.value(it) - return s.(string) - default: - p.bug("Unexpected key type: %s", it.typ) - panic("unreachable") - } -} - -// value translates an expected value from the lexer into a Go value wrapped -// as an empty interface. 
-func (p *parser) value(it item) (interface{}, tomlType) { - switch it.typ { - case itemString: - return p.replaceEscapes(it.val), p.typeOfPrimitive(it) - case itemMultilineString: - trimmed := stripFirstNewline(stripEscapedWhitespace(it.val)) - return p.replaceEscapes(trimmed), p.typeOfPrimitive(it) - case itemRawString: - return it.val, p.typeOfPrimitive(it) - case itemRawMultilineString: - return stripFirstNewline(it.val), p.typeOfPrimitive(it) - case itemBool: - switch it.val { - case "true": - return true, p.typeOfPrimitive(it) - case "false": - return false, p.typeOfPrimitive(it) - } - p.bug("Expected boolean value, but got '%s'.", it.val) - case itemInteger: - if !numUnderscoresOK(it.val) { - p.panicf("Invalid integer %q: underscores must be surrounded by digits", - it.val) - } - val := strings.Replace(it.val, "_", "", -1) - num, err := strconv.ParseInt(val, 10, 64) - if err != nil { - // Distinguish integer values. Normally, it'd be a bug if the lexer - // provides an invalid integer, but it's possible that the number is - // out of range of valid values (which the lexer cannot determine). - // So mark the former as a bug but the latter as a legitimate user - // error. - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Integer '%s' is out of the range of 64-bit "+ - "signed integers.", it.val) - } else { - p.bug("Expected integer value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemFloat: - parts := strings.FieldsFunc(it.val, func(r rune) bool { - switch r { - case '.', 'e', 'E': - return true - } - return false - }) - for _, part := range parts { - if !numUnderscoresOK(part) { - p.panicf("Invalid float %q: underscores must be "+ - "surrounded by digits", it.val) - } - } - if !numPeriodsOK(it.val) { - // As a special case, numbers like '123.' or '1.e2', - // which are valid as far as Go/strconv are concerned, - // must be rejected because TOML says that a fractional - // part consists of '.' followed by 1+ digits. - p.panicf("Invalid float %q: '.' 
must be followed "+ - "by one or more digits", it.val) - } - val := strings.Replace(it.val, "_", "", -1) - num, err := strconv.ParseFloat(val, 64) - if err != nil { - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Float '%s' is out of the range of 64-bit "+ - "IEEE-754 floating-point numbers.", it.val) - } else { - p.panicf("Invalid float value: %q", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemDatetime: - var t time.Time - var ok bool - var err error - for _, format := range []string{ - "2006-01-02T15:04:05Z07:00", - "2006-01-02T15:04:05", - "2006-01-02", - } { - t, err = time.ParseInLocation(format, it.val, time.Local) - if err == nil { - ok = true - break - } - } - if !ok { - p.panicf("Invalid TOML Datetime: %q.", it.val) - } - return t, p.typeOfPrimitive(it) - case itemArray: - array := make([]interface{}, 0) - types := make([]tomlType, 0) - - for it = p.next(); it.typ != itemArrayEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - val, typ := p.value(it) - array = append(array, val) - types = append(types, typ) - } - return array, p.typeOfArray(types) - case itemInlineTableStart: - var ( - hash = make(map[string]interface{}) - outerContext = p.context - outerKey = p.currentKey - ) - - p.context = append(p.context, p.currentKey) - p.currentKey = "" - for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { - if it.typ != itemKeyStart { - p.bug("Expected key start but instead found %q, around line %d", - it.val, p.approxLine) - } - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - // retrieve key - k := p.next() - p.approxLine = k.line - kname := p.keyString(k) - - // retrieve value - p.currentKey = kname - val, typ := p.value(p.next()) - // make sure we keep metadata up to date - p.setType(kname, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - hash[kname] = val - } - p.context = outerContext - p.currentKey = outerKey - return hash, tomlHash - } - p.bug("Unexpected value type: %s", it.typ) - panic("unreachable") -} - -// numUnderscoresOK checks whether each underscore in s is surrounded by -// characters that are not underscores. -func numUnderscoresOK(s string) bool { - accept := false - for _, r := range s { - if r == '_' { - if !accept { - return false - } - accept = false - continue - } - accept = true - } - return accept -} - -// numPeriodsOK checks whether every period in s is followed by a digit. -func numPeriodsOK(s string) bool { - period := false - for _, r := range s { - if period && !isDigit(r) { - return false - } - period = r == '.' - } - return !period -} - -// establishContext sets the current context of the parser, -// where the context is either a hash or an array of hashes. Which one is -// set depends on the value of the `array` parameter. -// -// Establishing the context also makes sure that the key isn't a duplicate, and -// will create implicit hashes automatically. -func (p *parser) establishContext(key Key, array bool) { - var ok bool - - // Always start at the top level and drill down for our context. - hashContext := p.mapping - keyContext := make(Key, 0) - - // We only need implicit hashes for key[0:-1] - for _, k := range key[0 : len(key)-1] { - _, ok = hashContext[k] - keyContext = append(keyContext, k) - - // No key? Make an implicit hash and move on. 
- if !ok { - p.addImplicit(keyContext) - hashContext[k] = make(map[string]interface{}) - } - - // If the hash context is actually an array of tables, then set - // the hash context to the last element in that array. - // - // Otherwise, it better be a table, since this MUST be a key group (by - // virtue of it not being the last element in a key). - switch t := hashContext[k].(type) { - case []map[string]interface{}: - hashContext = t[len(t)-1] - case map[string]interface{}: - hashContext = t - default: - p.panicf("Key '%s' was already created as a hash.", keyContext) - } - } - - p.context = keyContext - if array { - // If this is the first element for this array, then allocate a new - // list of tables for it. - k := key[len(key)-1] - if _, ok := hashContext[k]; !ok { - hashContext[k] = make([]map[string]interface{}, 0, 5) - } - - // Add a new table. But make sure the key hasn't already been used - // for something else. - if hash, ok := hashContext[k].([]map[string]interface{}); ok { - hashContext[k] = append(hash, make(map[string]interface{})) - } else { - p.panicf("Key '%s' was already created and cannot be used as "+ - "an array.", keyContext) - } - } else { - p.setValue(key[len(key)-1], make(map[string]interface{})) - } - p.context = append(p.context, key[len(key)-1]) -} - -// setValue sets the given key to the given value in the current context. -// It will make sure that the key hasn't already been defined, account for -// implicit key groups. -func (p *parser) setValue(key string, value interface{}) { - var tmpHash interface{} - var ok bool - - hash := p.mapping - keyContext := make(Key, 0) - for _, k := range p.context { - keyContext = append(keyContext, k) - if tmpHash, ok = hash[k]; !ok { - p.bug("Context for key '%s' has not been established.", keyContext) - } - switch t := tmpHash.(type) { - case []map[string]interface{}: - // The context is a table of hashes. Pick the most recent table - // defined as the current hash. - hash = t[len(t)-1] - case map[string]interface{}: - hash = t - default: - p.bug("Expected hash to have type 'map[string]interface{}', but "+ - "it has '%T' instead.", tmpHash) - } - } - keyContext = append(keyContext, key) - - if _, ok := hash[key]; ok { - // Typically, if the given key has already been set, then we have - // to raise an error since duplicate keys are disallowed. However, - // it's possible that a key was previously defined implicitly. In this - // case, it is allowed to be redefined concretely. (See the - // `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.) - // - // But we have to make sure to stop marking it as an implicit. (So that - // another redefinition provokes an error.) - // - // Note that since it has already been defined (as a hash), we don't - // want to overwrite it. So our business is done. - if p.isImplicit(keyContext) { - p.removeImplicit(keyContext) - return - } - - // Otherwise, we have a concrete key trying to override a previous - // key, which is *always* wrong. - p.panicf("Key '%s' has already been defined.", keyContext) - } - hash[key] = value -} - -// setType sets the type of a particular value at a given key. -// It should be called immediately AFTER setValue. -// -// Note that if `key` is empty, then the type given will be applied to the -// current context (which is either a table or an array of tables). 
-func (p *parser) setType(key string, typ tomlType) { - keyContext := make(Key, 0, len(p.context)+1) - for _, k := range p.context { - keyContext = append(keyContext, k) - } - if len(key) > 0 { // allow type setting for hashes - keyContext = append(keyContext, key) - } - p.types[keyContext.String()] = typ -} - -// addImplicit sets the given Key as having been created implicitly. -func (p *parser) addImplicit(key Key) { - p.implicits[key.String()] = true -} - -// removeImplicit stops tagging the given key as having been implicitly -// created. -func (p *parser) removeImplicit(key Key) { - p.implicits[key.String()] = false -} - -// isImplicit returns true if the key group pointed to by the key was created -// implicitly. -func (p *parser) isImplicit(key Key) bool { - return p.implicits[key.String()] -} - -// current returns the full key name of the current context. -func (p *parser) current() string { - if len(p.currentKey) == 0 { - return p.context.String() - } - if len(p.context) == 0 { - return p.currentKey - } - return fmt.Sprintf("%s.%s", p.context, p.currentKey) -} - -func stripFirstNewline(s string) string { - if len(s) == 0 || s[0] != '\n' { - return s - } - return s[1:] -} - -func stripEscapedWhitespace(s string) string { - esc := strings.Split(s, "\\\n") - if len(esc) > 1 { - for i := 1; i < len(esc); i++ { - esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace) - } - } - return strings.Join(esc, "") -} - -func (p *parser) replaceEscapes(str string) string { - var replaced []rune - s := []byte(str) - r := 0 - for r < len(s) { - if s[r] != '\\' { - c, size := utf8.DecodeRune(s[r:]) - r += size - replaced = append(replaced, c) - continue - } - r += 1 - if r >= len(s) { - p.bug("Escape sequence at end of string.") - return "" - } - switch s[r] { - default: - p.bug("Expected valid escape code after \\, but got %q.", s[r]) - return "" - case 'b': - replaced = append(replaced, rune(0x0008)) - r += 1 - case 't': - replaced = append(replaced, rune(0x0009)) - r += 1 - case 'n': - replaced = append(replaced, rune(0x000A)) - r += 1 - case 'f': - replaced = append(replaced, rune(0x000C)) - r += 1 - case 'r': - replaced = append(replaced, rune(0x000D)) - r += 1 - case '"': - replaced = append(replaced, rune(0x0022)) - r += 1 - case '\\': - replaced = append(replaced, rune(0x005C)) - r += 1 - case 'u': - // At this point, we know we have a Unicode escape of the form - // `uXXXX` at [r, r+5). (Because the lexer guarantees this - // for us.) - escaped := p.asciiEscapeToUnicode(s[r+1 : r+5]) - replaced = append(replaced, escaped) - r += 5 - case 'U': - // At this point, we know we have a Unicode escape of the form - // `uXXXX` at [r, r+9). (Because the lexer guarantees this - // for us.) 
- escaped := p.asciiEscapeToUnicode(s[r+1 : r+9]) - replaced = append(replaced, escaped) - r += 9 - } - } - return string(replaced) -} - -func (p *parser) asciiEscapeToUnicode(bs []byte) rune { - s := string(bs) - hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) - if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the "+ - "lexer claims it's OK: %s", s, err) - } - if !utf8.ValidRune(rune(hex)) { - p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) - } - return rune(hex) -} - -func isStringType(ty itemType) bool { - return ty == itemString || ty == itemMultilineString || - ty == itemRawString || ty == itemRawMultilineString -} diff --git a/vendor/github.com/BurntSushi/toml/session.vim b/vendor/github.com/BurntSushi/toml/session.vim deleted file mode 100644 index 562164be060..00000000000 --- a/vendor/github.com/BurntSushi/toml/session.vim +++ /dev/null @@ -1 +0,0 @@ -au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/vendor/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_check.go deleted file mode 100644 index c73f8afc1a6..00000000000 --- a/vendor/github.com/BurntSushi/toml/type_check.go +++ /dev/null @@ -1,91 +0,0 @@ -package toml - -// tomlType represents any Go type that corresponds to a TOML type. -// While the first draft of the TOML spec has a simplistic type system that -// probably doesn't need this level of sophistication, we seem to be militating -// toward adding real composite types. -type tomlType interface { - typeString() string -} - -// typeEqual accepts any two types and returns true if they are equal. -func typeEqual(t1, t2 tomlType) bool { - if t1 == nil || t2 == nil { - return false - } - return t1.typeString() == t2.typeString() -} - -func typeIsHash(t tomlType) bool { - return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) -} - -type tomlBaseType string - -func (btype tomlBaseType) typeString() string { - return string(btype) -} - -func (btype tomlBaseType) String() string { - return btype.typeString() -} - -var ( - tomlInteger tomlBaseType = "Integer" - tomlFloat tomlBaseType = "Float" - tomlDatetime tomlBaseType = "Datetime" - tomlString tomlBaseType = "String" - tomlBool tomlBaseType = "Bool" - tomlArray tomlBaseType = "Array" - tomlHash tomlBaseType = "Hash" - tomlArrayHash tomlBaseType = "ArrayHash" -) - -// typeOfPrimitive returns a tomlType of any primitive value in TOML. -// Primitive values are: Integer, Float, Datetime, String and Bool. -// -// Passing a lexer item other than the following will cause a BUG message -// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. -func (p *parser) typeOfPrimitive(lexItem item) tomlType { - switch lexItem.typ { - case itemInteger: - return tomlInteger - case itemFloat: - return tomlFloat - case itemDatetime: - return tomlDatetime - case itemString: - return tomlString - case itemMultilineString: - return tomlString - case itemRawString: - return tomlString - case itemRawMultilineString: - return tomlString - case itemBool: - return tomlBool - } - p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) - panic("unreachable") -} - -// typeOfArray returns a tomlType for an array given a list of types of its -// values. -// -// In the current spec, if an array is homogeneous, then its type is always -// "Array". If the array is not homogeneous, an error is generated. -func (p *parser) typeOfArray(types []tomlType) tomlType { - // Empty arrays are cool. 
- if len(types) == 0 { - return tomlArray - } - - theType := types[0] - for _, t := range types[1:] { - if !typeEqual(theType, t) { - p.panicf("Array contains values of type '%s' and '%s', but "+ - "arrays must be homogeneous.", theType, t) - } - } - return tomlArray -} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go deleted file mode 100644 index 608997c22f6..00000000000 --- a/vendor/github.com/BurntSushi/toml/type_fields.go +++ /dev/null @@ -1,242 +0,0 @@ -package toml - -// Struct field handling is adapted from code in encoding/json: -// -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the Go distribution. - -import ( - "reflect" - "sort" - "sync" -) - -// A field represents a single field found in a struct. -type field struct { - name string // the name of the field (`toml` tag included) - tag bool // whether field has a `toml` tag - index []int // represents the depth of an anonymous field - typ reflect.Type // the type of the field -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from toml tag", then -// breaking ties with index sequence. -type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. -type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that TOML should recognize for the given -// type. The algorithm is breadth-first search over the set of structs to -// include - the top struct and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" && !sf.Anonymous { // unexported - continue - } - opts := getOptions(sf.Tag) - if opts.skip { - continue - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. 
- if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := opts.name != "" - name := opts.name - if name == "" { - name = sf.Name - } - fields = append(fields, field{name, tagged, index, ft}) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - f := field{name: ft.Name(), index: index, typ: ft} - next = append(next, f) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with TOML tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// TOML tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. -func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. 
- f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -} diff --git a/vendor/github.com/Jeffail/gabs/LICENSE b/vendor/github.com/Jeffail/gabs/LICENSE deleted file mode 100644 index 99a62c6298f..00000000000 --- a/vendor/github.com/Jeffail/gabs/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2014 Ashley Jeffs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Jeffail/gabs/README.md b/vendor/github.com/Jeffail/gabs/README.md deleted file mode 100644 index 962ba686483..00000000000 --- a/vendor/github.com/Jeffail/gabs/README.md +++ /dev/null @@ -1,290 +0,0 @@ -![Gabs](gabs_logo.png "Gabs") - -Gabs is a small utility for dealing with dynamic or unknown JSON structures in -golang. It's pretty much just a helpful wrapper around the golang -`json.Marshal/json.Unmarshal` behaviour and `map[string]interface{}` objects. -It does nothing spectacular except for being fabulous. 
- -https://godoc.org/github.com/Jeffail/gabs - -## Install - -``` bash -go get github.com/Jeffail/gabs -``` - -## Use - -### Parsing and searching JSON - -``` go -jsonParsed, err := gabs.ParseJSON([]byte(`{ - "outter":{ - "inner":{ - "value1":10, - "value2":22 - }, - "alsoInner":{ - "value1":20, - "array1":[ - 30, 40 - ] - } - } -}`)) - -var value float64 -var ok bool - -value, ok = jsonParsed.Path("outter.inner.value1").Data().(float64) -// value == 10.0, ok == true - -value, ok = jsonParsed.Search("outter", "inner", "value1").Data().(float64) -// value == 10.0, ok == true - -gObj, err := jsonParsed.JSONPointer("/outter/alsoInner/array1/1") -if err != nil { - panic(err) -} -value, ok = gObj.Data().(float64) -// value == 40.0, ok == true - -value, ok = jsonParsed.Path("does.not.exist").Data().(float64) -// value == 0.0, ok == false - -exists := jsonParsed.Exists("outter", "inner", "value1") -// exists == true - -exists := jsonParsed.ExistsP("does.not.exist") -// exists == false -``` - -### Iterating objects - -``` go -jsonParsed, _ := gabs.ParseJSON([]byte(`{"object":{ "first": 1, "second": 2, "third": 3 }}`)) - -// S is shorthand for Search -children, _ := jsonParsed.S("object").ChildrenMap() -for key, child := range children { - fmt.Printf("key: %v, value: %v\n", key, child.Data().(string)) -} -``` - -### Iterating arrays - -``` go -jsonParsed, err := gabs.ParseJSON([]byte(`{"array":[ "first", "second", "third" ]}`)) -if err != nil { - panic(err) -} - -// S is shorthand for Search -children, err := jsonParsed.S("array").Children() -if err != nil { - panic(err) -} - -for _, child := range children { - fmt.Println(child.Data().(string)) -} -``` - -Will print: - -``` -first -second -third -``` - -Children() will return all children of an array in order. This also works on -objects, however, the children will be returned in a random order. - -### Searching through arrays - -If your JSON structure contains arrays you can still search the fields of the -objects within the array, this returns a JSON array containing the results for -each element. 
- -``` go -jsonParsed, err := gabs.ParseJSON([]byte(`{"array":[ {"value":1}, {"value":2}, {"value":3} ]}`)) -if err != nil { - panic(err) -} -fmt.Println(jsonParsed.Path("array.value").String()) -``` - -Will print: - -``` -[1,2,3] -``` - -### Generating JSON - -``` go -jsonObj := gabs.New() -// or gabs.Consume(jsonObject) to work on an existing map[string]interface{} - -jsonObj.Set(10, "outter", "inner", "value") -jsonObj.SetP(20, "outter.inner.value2") -jsonObj.Set(30, "outter", "inner2", "value3") - -fmt.Println(jsonObj.String()) -``` - -Will print: - -``` -{"outter":{"inner":{"value":10,"value2":20},"inner2":{"value3":30}}} -``` - -To pretty-print: - -``` go -fmt.Println(jsonObj.StringIndent("", " ")) -``` - -Will print: - -``` -{ - "outter": { - "inner": { - "value": 10, - "value2": 20 - }, - "inner2": { - "value3": 30 - } - } -} -``` - -### Generating Arrays - -``` go -jsonObj := gabs.New() - -jsonObj.Array("foo", "array") -// Or .ArrayP("foo.array") - -jsonObj.ArrayAppend(10, "foo", "array") -jsonObj.ArrayAppend(20, "foo", "array") -jsonObj.ArrayAppend(30, "foo", "array") - -fmt.Println(jsonObj.String()) -``` - -Will print: - -``` -{"foo":{"array":[10,20,30]}} -``` - -Working with arrays by index: - -``` go -jsonObj := gabs.New() - -// Create an array with the length of 3 -jsonObj.ArrayOfSize(3, "foo") - -jsonObj.S("foo").SetIndex("test1", 0) -jsonObj.S("foo").SetIndex("test2", 1) - -// Create an embedded array with the length of 3 -jsonObj.S("foo").ArrayOfSizeI(3, 2) - -jsonObj.S("foo").Index(2).SetIndex(1, 0) -jsonObj.S("foo").Index(2).SetIndex(2, 1) -jsonObj.S("foo").Index(2).SetIndex(3, 2) - -fmt.Println(jsonObj.String()) -``` - -Will print: - -``` -{"foo":["test1","test2",[1,2,3]]} -``` - -### Converting back to JSON - -This is the easiest part: - -``` go -jsonParsedObj, _ := gabs.ParseJSON([]byte(`{ - "outter":{ - "values":{ - "first":10, - "second":11 - } - }, - "outter2":"hello world" -}`)) - -jsonOutput := jsonParsedObj.String() -// Becomes `{"outter":{"values":{"first":10,"second":11}},"outter2":"hello world"}` -``` - -And to serialize a specific segment is as simple as: - -``` go -jsonParsedObj := gabs.ParseJSON([]byte(`{ - "outter":{ - "values":{ - "first":10, - "second":11 - } - }, - "outter2":"hello world" -}`)) - -jsonOutput := jsonParsedObj.Search("outter").String() -// Becomes `{"values":{"first":10,"second":11}}` -``` - -### Merge two containers - -You can merge a JSON structure into an existing one, where collisions will be -converted into a JSON array. - -``` go -jsonParsed1, _ := ParseJSON([]byte(`{"outter": {"value1": "one"}}`)) -jsonParsed2, _ := ParseJSON([]byte(`{"outter": {"inner": {"value3": "three"}}, "outter2": {"value2": "two"}}`)) - -jsonParsed1.Merge(jsonParsed2) -// Becomes `{"outter":{"inner":{"value3":"three"},"value1":"one"},"outter2":{"value2":"two"}}` -``` - -Arrays are merged: - -``` go -jsonParsed1, _ := ParseJSON([]byte(`{"array": ["one"]}`)) -jsonParsed2, _ := ParseJSON([]byte(`{"array": ["two"]}`)) - -jsonParsed1.Merge(jsonParsed2) -// Becomes `{"array":["one", "two"]}` -``` - -### Parsing Numbers - -Gabs uses the `json` package under the bonnet, which by default will parse all -number values into `float64`. 
If you need to parse `Int` values then you should -use a `json.Decoder` (https://golang.org/pkg/encoding/json/#Decoder): - -``` go -sample := []byte(`{"test":{"int":10, "float":6.66}}`) -dec := json.NewDecoder(bytes.NewReader(sample)) -dec.UseNumber() - -val, err := gabs.ParseJSONDecoder(dec) -if err != nil { - t.Errorf("Failed to parse: %v", err) - return -} - -intValue, err := val.Path("test.int").Data().(json.Number).Int64() -``` diff --git a/vendor/github.com/Jeffail/gabs/gabs.go b/vendor/github.com/Jeffail/gabs/gabs.go deleted file mode 100644 index 011c4c39241..00000000000 --- a/vendor/github.com/Jeffail/gabs/gabs.go +++ /dev/null @@ -1,727 +0,0 @@ -/* -Copyright (c) 2014 Ashley Jeffs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -*/ - -// Package gabs implements a simplified wrapper around creating and parsing -// unknown or dynamic JSON. -package gabs - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "io/ioutil" - "strconv" - "strings" -) - -//------------------------------------------------------------------------------ - -var ( - // ErrOutOfBounds indicates an index was out of bounds. - ErrOutOfBounds = errors.New("out of bounds") - - // ErrNotObjOrArray is returned when a target is not an object or array type - // but needs to be for the intended operation. - ErrNotObjOrArray = errors.New("not an object or array") - - // ErrNotObj is returned when a target is not an object but needs to be for - // the intended operation. - ErrNotObj = errors.New("not an object") - - // ErrNotArray is returned when a target is not an array but needs to be for - // the intended operation. - ErrNotArray = errors.New("not an array") - - // ErrPathCollision is returned when creating a path failed because an - // element collided with an existing value. - ErrPathCollision = errors.New("encountered value collision whilst building path") - - // ErrInvalidInputObj is returned when the input value was not a - // map[string]interface{}. - ErrInvalidInputObj = errors.New("invalid input object") - - // ErrInvalidInputText is returned when the input data could not be parsed. - ErrInvalidInputText = errors.New("input text could not be parsed") - - // ErrInvalidPath is returned when the filepath was not valid. - ErrInvalidPath = errors.New("invalid file path") - - // ErrInvalidBuffer is returned when the input buffer contained an invalid - // JSON string. 
- ErrInvalidBuffer = errors.New("input buffer contained invalid JSON") -) - -//------------------------------------------------------------------------------ - -func resolveJSONPointerHierarchy(path string) ([]string, error) { - if len(path) < 1 { - return nil, errors.New("failed to resolve JSON pointer: path must not be empty") - } - if path[0] != '/' { - return nil, errors.New("failed to resolve JSON pointer: path must begin with '/'") - } - hierarchy := strings.Split(path, "/")[1:] - for i, v := range hierarchy { - v = strings.Replace(v, "~1", "/", -1) - v = strings.Replace(v, "~0", "~", -1) - hierarchy[i] = v - } - return hierarchy, nil -} - -//------------------------------------------------------------------------------ - -// Container references a specific element within a JSON structure. -type Container struct { - object interface{} -} - -// Data returns the underlying interface{} of the target element in the JSON -// structure. -func (g *Container) Data() interface{} { - if g == nil { - return nil - } - return g.object -} - -//------------------------------------------------------------------------------ - -// Path searches the JSON structure following a path in dot notation. -func (g *Container) Path(path string) *Container { - return g.Search(strings.Split(path, ".")...) -} - -// Search attempts to find and return an object within the JSON structure by -// following a provided hierarchy of field names to locate the target. If the -// search encounters an array and has not reached the end target then it will -// iterate each object of the array for the target and return all of the results -// in a JSON array. -func (g *Container) Search(hierarchy ...string) *Container { - var object interface{} - - object = g.Data() - for target := 0; target < len(hierarchy); target++ { - if mmap, ok := object.(map[string]interface{}); ok { - object, ok = mmap[hierarchy[target]] - if !ok { - return nil - } - } else if marray, ok := object.([]interface{}); ok { - tmpArray := []interface{}{} - for _, val := range marray { - tmpGabs := &Container{val} - res := tmpGabs.Search(hierarchy[target:]...) - if res != nil { - tmpArray = append(tmpArray, res.Data()) - } - } - if len(tmpArray) == 0 { - return nil - } - return &Container{tmpArray} - } else { - return nil - } - } - return &Container{object} -} - -// JSONPointer parses a JSON pointer path (https://tools.ietf.org/html/rfc6901) -// and either returns a *gabs.Container containing the result or an error if the -// referenced item could not be found. 
-func (g *Container) JSONPointer(path string) (*Container, error) { - hierarchy, err := resolveJSONPointerHierarchy(path) - if err != nil { - return nil, err - } - - object := g.Data() - for target := 0; target < len(hierarchy); target++ { - pathSeg := hierarchy[target] - if mmap, ok := object.(map[string]interface{}); ok { - object, ok = mmap[pathSeg] - if !ok { - return nil, fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' was not found", target, pathSeg) - } - } else if marray, ok := object.([]interface{}); ok { - index, err := strconv.Atoi(pathSeg) - if err != nil { - return nil, fmt.Errorf("failed to resolve JSON pointer: could not parse index '%v' value '%v' into array index: %v", target, pathSeg, err) - } - if len(marray) <= index { - return nil, fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' exceeded target array size of '%v'", target, pathSeg, len(marray)) - } - object = marray[index] - } else { - return &Container{nil}, fmt.Errorf("failed to resolve JSON pointer: index '%v' field '%v' was not found", target, pathSeg) - } - } - return &Container{object}, nil -} - -// S is a shorthand alias for Search. -func (g *Container) S(hierarchy ...string) *Container { - return g.Search(hierarchy...) -} - -// Exists checks whether a path exists. -func (g *Container) Exists(hierarchy ...string) bool { - return g.Search(hierarchy...) != nil -} - -// ExistsP checks whether a dot notation path exists. -func (g *Container) ExistsP(path string) bool { - return g.Exists(strings.Split(path, ".")...) -} - -// Index attempts to find and return an element within a JSON array by an index. -func (g *Container) Index(index int) *Container { - if array, ok := g.Data().([]interface{}); ok { - if index >= len(array) { - return &Container{nil} - } - return &Container{array[index]} - } - return &Container{nil} -} - -// Children returns a slice of all children of an array element. This also works -// for objects, however, the children returned for an object will be in a random -// order and you lose the names of the returned objects this way. -func (g *Container) Children() ([]*Container, error) { - if array, ok := g.Data().([]interface{}); ok { - children := make([]*Container, len(array)) - for i := 0; i < len(array); i++ { - children[i] = &Container{array[i]} - } - return children, nil - } - if mmap, ok := g.Data().(map[string]interface{}); ok { - children := []*Container{} - for _, obj := range mmap { - children = append(children, &Container{obj}) - } - return children, nil - } - return nil, ErrNotObjOrArray -} - -// ChildrenMap returns a map of all the children of an object element. -func (g *Container) ChildrenMap() (map[string]*Container, error) { - if mmap, ok := g.Data().(map[string]interface{}); ok { - children := map[string]*Container{} - for name, obj := range mmap { - children[name] = &Container{obj} - } - return children, nil - } - return nil, ErrNotObj -} - -//------------------------------------------------------------------------------ - -// Set the value of a field at a JSON path, any parts of the path that do not -// exist will be constructed, and if a collision occurs with a non object type -// whilst iterating the path an error is returned. 
-func (g *Container) Set(value interface{}, path ...string) (*Container, error) { - if len(path) == 0 { - g.object = value - return g, nil - } - var object interface{} - if g.object == nil { - g.object = map[string]interface{}{} - } - object = g.object - for target := 0; target < len(path); target++ { - if mmap, ok := object.(map[string]interface{}); ok { - if target == len(path)-1 { - mmap[path[target]] = value - } else if mmap[path[target]] == nil { - mmap[path[target]] = map[string]interface{}{} - } - object = mmap[path[target]] - } else { - return &Container{nil}, ErrPathCollision - } - } - return &Container{object}, nil -} - -// SetP sets the value of a field at a JSON path using dot notation, any parts -// of the path that do not exist will be constructed, and if a collision occurs -// with a non object type whilst iterating the path an error is returned. -func (g *Container) SetP(value interface{}, path string) (*Container, error) { - return g.Set(value, strings.Split(path, ".")...) -} - -// SetIndex attempts to set a value of an array element based on an index. -func (g *Container) SetIndex(value interface{}, index int) (*Container, error) { - if array, ok := g.Data().([]interface{}); ok { - if index >= len(array) { - return &Container{nil}, ErrOutOfBounds - } - array[index] = value - return &Container{array[index]}, nil - } - return &Container{nil}, ErrNotArray -} - -// SetJSONPointer parses a JSON pointer path -// (https://tools.ietf.org/html/rfc6901) and sets the leaf to a value. Returns -// an error if the pointer could not be resolved due to missing fields. -func (g *Container) SetJSONPointer(value interface{}, path string) error { - hierarchy, err := resolveJSONPointerHierarchy(path) - if err != nil { - return err - } - - if len(hierarchy) == 0 { - g.object = value - return nil - } - - object := g.object - - for target := 0; target < len(hierarchy); target++ { - pathSeg := hierarchy[target] - if mmap, ok := object.(map[string]interface{}); ok { - if target == len(hierarchy)-1 { - object = value - mmap[pathSeg] = object - } else if object = mmap[pathSeg]; object == nil { - return fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' was not found", target, pathSeg) - } - } else if marray, ok := object.([]interface{}); ok { - index, err := strconv.Atoi(pathSeg) - if err != nil { - return fmt.Errorf("failed to resolve JSON pointer: could not parse index '%v' value '%v' into array index: %v", target, pathSeg, err) - } - if len(marray) <= index { - return fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' exceeded target array size of '%v'", target, pathSeg, len(marray)) - } - if target == len(hierarchy)-1 { - object = value - marray[index] = object - } else if object = marray[index]; object == nil { - return fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' was not found", target, pathSeg) - } - } else { - return fmt.Errorf("failed to resolve JSON pointer: index '%v' value '%v' was not found", target, pathSeg) - } - } - return nil -} - -// Object creates a new JSON object at a target path. Returns an error if the -// path contains a collision with a non object type. -func (g *Container) Object(path ...string) (*Container, error) { - return g.Set(map[string]interface{}{}, path...) -} - -// ObjectP creates a new JSON object at a target path using dot notation. -// Returns an error if the path contains a collision with a non object type. 
-func (g *Container) ObjectP(path string) (*Container, error) { - return g.Object(strings.Split(path, ".")...) -} - -// ObjectI creates a new JSON object at an array index. Returns an error if the -// object is not an array or the index is out of bounds. -func (g *Container) ObjectI(index int) (*Container, error) { - return g.SetIndex(map[string]interface{}{}, index) -} - -// Array creates a new JSON array at a path. Returns an error if the path -// contains a collision with a non object type. -func (g *Container) Array(path ...string) (*Container, error) { - return g.Set([]interface{}{}, path...) -} - -// ArrayP creates a new JSON array at a path using dot notation. Returns an -// error if the path contains a collision with a non object type. -func (g *Container) ArrayP(path string) (*Container, error) { - return g.Array(strings.Split(path, ".")...) -} - -// ArrayI creates a new JSON array within an array at an index. Returns an error -// if the element is not an array or the index is out of bounds. -func (g *Container) ArrayI(index int) (*Container, error) { - return g.SetIndex([]interface{}{}, index) -} - -// ArrayOfSize creates a new JSON array of a particular size at a path. Returns -// an error if the path contains a collision with a non object type. -func (g *Container) ArrayOfSize(size int, path ...string) (*Container, error) { - a := make([]interface{}, size) - return g.Set(a, path...) -} - -// ArrayOfSizeP creates a new JSON array of a particular size at a path using -// dot notation. Returns an error if the path contains a collision with a non -// object type. -func (g *Container) ArrayOfSizeP(size int, path string) (*Container, error) { - return g.ArrayOfSize(size, strings.Split(path, ".")...) -} - -// ArrayOfSizeI create a new JSON array of a particular size within an array at -// an index. Returns an error if the element is not an array or the index is out -// of bounds. -func (g *Container) ArrayOfSizeI(size, index int) (*Container, error) { - a := make([]interface{}, size) - return g.SetIndex(a, index) -} - -// Delete an element at a path, an error is returned if the element does not -// exist. -func (g *Container) Delete(path ...string) error { - var object interface{} - - if g.object == nil { - return ErrNotObj - } - object = g.object - for target := 0; target < len(path); target++ { - if mmap, ok := object.(map[string]interface{}); ok { - if target == len(path)-1 { - if _, ok := mmap[path[target]]; ok { - delete(mmap, path[target]) - } else { - return ErrNotObj - } - } - object = mmap[path[target]] - } else { - return ErrNotObj - } - } - return nil -} - -// DeleteP deletes an element at a path using dot notation, an error is returned -// if the element does not exist. -func (g *Container) DeleteP(path string) error { - return g.Delete(strings.Split(path, ".")...) -} - -// MergeFn merges two objects using a provided function to resolve collisions. -// -// The collision function receives two interface{} arguments, destination (the -// original object) and source (the object being merged into the destination). -// Which ever value is returned becomes the new value in the destination object -// at the location of the collision. -func (g *Container) MergeFn(source *Container, collisionFn func(destination, source interface{}) interface{}) error { - var recursiveFnc func(map[string]interface{}, []string) error - recursiveFnc = func(mmap map[string]interface{}, path []string) error { - for key, value := range mmap { - newPath := append(path, key) - if g.Exists(newPath...) 
{ - existingData := g.Search(newPath...).Data() - switch t := value.(type) { - case map[string]interface{}: - switch existingVal := existingData.(type) { - case map[string]interface{}: - if err := recursiveFnc(t, newPath); err != nil { - return err - } - default: - if _, err := g.Set(collisionFn(existingVal, t), newPath...); err != nil { - return err - } - } - default: - if _, err := g.Set(collisionFn(existingData, t), newPath...); err != nil { - return err - } - } - } else { - // path doesn't exist. So set the value - if _, err := g.Set(value, newPath...); err != nil { - return err - } - } - } - return nil - } - if mmap, ok := source.Data().(map[string]interface{}); ok { - return recursiveFnc(mmap, []string{}) - } - return nil -} - -// Merge a source object into an existing destination object. When a collision -// is found within the merged structures (both a source and destination object -// contain the same non-object keys) the result will be an array containing both -// values, where values that are already arrays will be expanded into the -// resulting array. -// -// It is possible to merge structures will different collision behaviours with -// MergeFn. -func (g *Container) Merge(source *Container) error { - return g.MergeFn(source, func(dest, source interface{}) interface{} { - destArr, destIsArray := dest.([]interface{}) - sourceArr, sourceIsArray := source.([]interface{}) - if destIsArray { - if sourceIsArray { - return append(destArr, sourceArr...) - } - return append(destArr, source) - } - if sourceIsArray { - return append(append([]interface{}{}, dest), sourceArr...) - } - return []interface{}{dest, source} - }) -} - -//------------------------------------------------------------------------------ - -/* -Array modification/search - Keeping these options simple right now, no need for -anything more complicated since you can just cast to []interface{}, modify and -then reassign with Set. -*/ - -// ArrayAppend attempts to append a value onto a JSON array at a path. If the -// target is not a JSON array then it will be converted into one, with its -// original contents set to the first element of the array. -func (g *Container) ArrayAppend(value interface{}, path ...string) error { - if array, ok := g.Search(path...).Data().([]interface{}); ok { - array = append(array, value) - _, err := g.Set(array, path...) - return err - } - - newArray := []interface{}{} - if d := g.Search(path...).Data(); d != nil { - newArray = append(newArray, d) - } - newArray = append(newArray, value) - - _, err := g.Set(newArray, path...) - return err -} - -// ArrayAppendP attempts to append a value onto a JSON array at a path using dot -// notation. If the target is not a JSON array then it will be converted into -// one, with its original contents set to the first element of the array. -func (g *Container) ArrayAppendP(value interface{}, path string) error { - return g.ArrayAppend(value, strings.Split(path, ".")...) -} - -// ArrayRemove attempts to remove an element identified by an index from a JSON -// array at a path. -func (g *Container) ArrayRemove(index int, path ...string) error { - if index < 0 { - return ErrOutOfBounds - } - array, ok := g.Search(path...).Data().([]interface{}) - if !ok { - return ErrNotArray - } - if index < len(array) { - array = append(array[:index], array[index+1:]...) - } else { - return ErrOutOfBounds - } - _, err := g.Set(array, path...) 
- return err -} - -// ArrayRemoveP attempts to remove an element identified by an index from a JSON -// array at a path using dot notation. -func (g *Container) ArrayRemoveP(index int, path string) error { - return g.ArrayRemove(index, strings.Split(path, ".")...) -} - -// ArrayElement attempts to access an element by an index from a JSON array at a -// path. -func (g *Container) ArrayElement(index int, path ...string) (*Container, error) { - if index < 0 { - return &Container{nil}, ErrOutOfBounds - } - array, ok := g.Search(path...).Data().([]interface{}) - if !ok { - return &Container{nil}, ErrNotArray - } - if index < len(array) { - return &Container{array[index]}, nil - } - return &Container{nil}, ErrOutOfBounds -} - -// ArrayElementP attempts to access an element by an index from a JSON array at -// a path using dot notation. -func (g *Container) ArrayElementP(index int, path string) (*Container, error) { - return g.ArrayElement(index, strings.Split(path, ".")...) -} - -// ArrayCount counts the number of elements in a JSON array at a path. -func (g *Container) ArrayCount(path ...string) (int, error) { - if array, ok := g.Search(path...).Data().([]interface{}); ok { - return len(array), nil - } - return 0, ErrNotArray -} - -// ArrayCountP counts the number of elements in a JSON array at a path using dot -// notation. -func (g *Container) ArrayCountP(path string) (int, error) { - return g.ArrayCount(strings.Split(path, ".")...) -} - -//------------------------------------------------------------------------------ - -// Bytes marshals an element to a JSON []byte blob. -func (g *Container) Bytes() []byte { - if g.Data() != nil { - if bytes, err := json.Marshal(g.object); err == nil { - return bytes - } - } - return []byte("{}") -} - -// BytesIndent marshals an element to a JSON []byte blob formatted with a prefix -// and indent string. -func (g *Container) BytesIndent(prefix string, indent string) []byte { - if g.object != nil { - if bytes, err := json.MarshalIndent(g.object, prefix, indent); err == nil { - return bytes - } - } - return []byte("{}") -} - -// String marshals an element to a JSON formatted string. -func (g *Container) String() string { - return string(g.Bytes()) -} - -// StringIndent marshals an element to a JSON string formatted with a prefix and -// indent string. -func (g *Container) StringIndent(prefix string, indent string) string { - return string(g.BytesIndent(prefix, indent)) -} - -// EncodeOpt is a functional option for the EncodeJSON method. -type EncodeOpt func(e *json.Encoder) - -// EncodeOptHTMLEscape sets the encoder to escape the JSON for html. -func EncodeOptHTMLEscape(doEscape bool) EncodeOpt { - return func(e *json.Encoder) { - e.SetEscapeHTML(doEscape) - } -} - -// EncodeOptIndent sets the encoder to indent the JSON output. -func EncodeOptIndent(prefix string, indent string) EncodeOpt { - return func(e *json.Encoder) { - e.SetIndent(prefix, indent) - } -} - -// EncodeJSON marshals an element to a JSON formatted []byte using a variant -// list of modifier functions for the encoder being used. Functions for -// modifying the output are prefixed with EncodeOpt, e.g. EncodeOptHTMLEscape. -func (g *Container) EncodeJSON(encodeOpts ...EncodeOpt) []byte { - var b bytes.Buffer - encoder := json.NewEncoder(&b) - encoder.SetEscapeHTML(false) // Do not escape by default. 
- for _, opt := range encodeOpts { - opt(encoder) - } - if err := encoder.Encode(g.object); err != nil { - return []byte("{}") - } - result := b.Bytes() - if len(result) > 0 { - result = result[:len(result)-1] - } - return result -} - -// New creates a new gabs JSON object. -func New() *Container { - return &Container{map[string]interface{}{}} -} - -// Consume an already unmarshalled JSON object (or a new map[string]interface{}) -// into a *Container. -func Consume(root interface{}) (*Container, error) { - return &Container{root}, nil -} - -// ParseJSON unmarshals a JSON byte slice into a *Container. -func ParseJSON(sample []byte) (*Container, error) { - var gabs Container - - if err := json.Unmarshal(sample, &gabs.object); err != nil { - return nil, err - } - - return &gabs, nil -} - -// ParseJSONDecoder applies a json.Decoder to a *Container. -func ParseJSONDecoder(decoder *json.Decoder) (*Container, error) { - var gabs Container - - if err := decoder.Decode(&gabs.object); err != nil { - return nil, err - } - - return &gabs, nil -} - -// ParseJSONFile reads a file and unmarshals the contents into a *Container. -func ParseJSONFile(path string) (*Container, error) { - if len(path) > 0 { - cBytes, err := ioutil.ReadFile(path) - if err != nil { - return nil, err - } - - container, err := ParseJSON(cBytes) - if err != nil { - return nil, err - } - - return container, nil - } - return nil, ErrInvalidPath -} - -// ParseJSONBuffer reads a buffer and unmarshals the contents into a *Container. -func ParseJSONBuffer(buffer io.Reader) (*Container, error) { - var gabs Container - jsonDecoder := json.NewDecoder(buffer) - if err := jsonDecoder.Decode(&gabs.object); err != nil { - return nil, err - } - - return &gabs, nil -} - -//------------------------------------------------------------------------------ diff --git a/vendor/github.com/Jeffail/gabs/gabs_logo.png b/vendor/github.com/Jeffail/gabs/gabs_logo.png deleted file mode 100644 index b6c1fad9931..00000000000 Binary files a/vendor/github.com/Jeffail/gabs/gabs_logo.png and /dev/null differ diff --git a/vendor/github.com/Jeffail/gabs/go.mod b/vendor/github.com/Jeffail/gabs/go.mod deleted file mode 100644 index ff2fc976614..00000000000 --- a/vendor/github.com/Jeffail/gabs/go.mod +++ /dev/null @@ -1 +0,0 @@ -module github.com/Jeffail/gabs diff --git a/vendor/github.com/Jeffail/tunny/LICENSE b/vendor/github.com/Jeffail/tunny/LICENSE deleted file mode 100644 index 99a62c6298f..00000000000 --- a/vendor/github.com/Jeffail/tunny/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2014 Ashley Jeffs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Jeffail/tunny/README.md b/vendor/github.com/Jeffail/tunny/README.md deleted file mode 100644 index 1712d8e6785..00000000000 --- a/vendor/github.com/Jeffail/tunny/README.md +++ /dev/null @@ -1,229 +0,0 @@ -![Tunny](tunny_logo.png "Tunny") - -Tunny is a Golang library for spawning and managing a goroutine pool. - -The API is synchronous and simple to use. Jobs are allocated to a worker when one becomes available. - -https://godoc.org/github.com/Jeffail/tunny - -## How to install: - -```bash -go get github.com/jeffail/tunny -``` - -## How to use: - -The most obvious use for a goroutine pool would be limiting heavy jobs to the number of CPUs available. In the example below we limit the work from arbitrary numbers of HTTP request goroutines through our pool. - -```go -package main - -import ( - "io/ioutil" - "net/http" - "runtime" - - "github.com/jeffail/tunny" -) - -func main() { - numCPUs := runtime.NumCPU() - runtime.GOMAXPROCS(numCPUs+1) // numCPUs hot threads + one for async tasks. - - pool, _ := tunny.CreatePool(numCPUs, func(object interface{}) interface{} { - input, _ := object.([]byte) - - // Do something that takes a lot of work - output := input - - return output - }).Open() - - defer pool.Close() - - http.HandleFunc("/work", func(w http.ResponseWriter, r *http.Request) { - input, err := ioutil.ReadAll(r.Body) - if err != nil { - http.Error(w, "Internal error", http.StatusInternalServerError) - } - - // Send work to our pool - result, _ := pool.SendWork(input) - - w.Write(result.([]byte)) - }) - - http.ListenAndServe(":8080", nil) -} -``` - -Tunny supports timeouts. You can replace the `SendWork` call above to the following: - -```go - // Or, alternatively, send it with a timeout (in this case 5 seconds). - result, err := pool.SendWorkTimed(5000, input) - if err != nil { - http.Error(w, "Request timed out", http.StatusRequestTimeout) - } -``` - -## Can I send a closure instead of data? - -Yes, the arguments passed to the worker are boxed as interface{}, so this can actually be a func, you can implement this yourself, or if you're not bothered about return values you can use: - -```go -exampleChannel := make(chan int) - -pool, _ := tunny.CreatePoolGeneric(numCPUs).Open() - -err := pool.SendWork(func() { - /* Do your hard work here, usual rules of closures apply here, - * so you can return values like so: - */ - exampleChannel <- 10 -}) - -if err != nil { - // You done goofed -} -``` - -## How do I give my workers state? - -Tunny workers implement the `TunnyWorkers` interface, simply implement this interface to have your own objects (and state) act as your workers. - -```go -/* -TunnyWorker - The basic interface of a tunny worker. -*/ -type TunnyWorker interface { - - // Called for each job, expects the result to be returned synchronously - TunnyJob(interface{}) interface{} - - // Called after each job, this indicates whether the worker is ready for the next job. - // The default implementation is to return true always. If false is returned then the - // method is called every five milliseconds until either true is returned or the pool - // is closed. 
- TunnyReady() bool -} -``` - -Here is a short example: - -```go -type customWorker struct { - // TODO: Put some state here -} - -// Use this call to block further jobs if necessary -func (worker *customWorker) TunnyReady() bool { - return true -} - -// This is where the work actually happens -func (worker *customWorker) TunnyJob(data interface{}) interface{} { - /* TODO: Use and modify state - * there's no need for thread safety paradigms here unless the - * data is being accessed from another goroutine outside of - * the pool. - */ - if outputStr, ok := data.(string); ok { - return ("custom job done: " + outputStr) - } - return nil -} - -func TestCustomWorkers (t *testing.T) { - outChan := make(chan int, 10) - - wg := new(sync.WaitGroup) - wg.Add(10) - - workers := make([]tunny.TunnyWorker, 4) - for i, _ := range workers { - workers[i] = &(customWorker{}) - } - - pool, _ := tunny.CreateCustomPool(workers).Open() - - defer pool.Close() - - for i := 0; i < 10; i++ { - go func() { - value, _ := pool.SendWork("hello world") - fmt.Println(value.(string)) - - wg.Done() - }() - } - - wg.Wait() -} -``` - -The TunnyReady method allows you to use your state to determine whether or not a worker should take on another job. For example, your worker could hold a counter of how many jobs it has done, and perhaps after a certain amount it should perform another act before taking on more work, it's important to use TunnyReady for these occasions since blocking the TunnyJob call will hold up the waiting client. - -It is recommended that you do not block TunnyReady() whilst you wait for some condition to change, since this can prevent the pool from closing the worker goroutines. Currently, TunnyReady is called at 5 millisecond intervals until you answer true or the pool is closed. - -## I need more control - -You crazy fool, let's take this up to the next level. You can optionally implement `TunnyExtendedWorker` for more control. - -```go -/* -TunnyExtendedWorker - An optional interface that can be implemented if the worker needs -more control over its state. -*/ -type TunnyExtendedWorker interface { - - // Called when the pool is opened, this will be called before any jobs are sent. - TunnyInitialize() - - // Called when the pool is closed, this will be called after all jobs are completed. - TunnyTerminate() -} -``` - -## Can a worker detect when a timeout occurs? - -Yes, you can also implement the `TunnyInterruptable` interface. - -```go -/* -TunnyInterruptable - An optional interface that can be implemented in order to allow the -worker to drop jobs when they are abandoned. -*/ -type TunnyInterruptable interface { - - // Called when the current job has been abandoned by the client. - TunnyInterrupt() -} -``` - -This method will be called in the event that a timeout occurs whilst waiting for the result. `TunnyInterrupt` is called from a newly spawned goroutine, so you'll need to create your own mechanism for stopping your worker mid-way through a job. - -## Can SendWork be called asynchronously? - -There are the helper functions SendWorkAsync and SendWorkTimedAsync, that are the same as their respective sync calls with an optional second argument func(interface{}, error), this is the call made when a result is returned and can be nil if there is no need for the closure. - -However, if you find yourself in a situation where the sync return is not necessary then chances are you don't actually need Tunny at all. 
Golang is all about making concurrent programming simple by nature, and using Tunny for implementing simple async worker calls defeats the great work of the language spec and adds overhead that isn't necessary. - -## Behaviours and caveats: - -### - Workers request jobs on an ad-hoc basis - -When there is a backlog of jobs waiting to be serviced, and all workers are occupied, a job will not be assigned to a worker until it is already prepared for its next job. This means workers do not develop their own individual queues. Instead, the backlog is shared by the entire pool. - -This means an individual worker is able to halt, or spend exceptional lengths of time on a single request without hindering the flow of any other requests, provided there are other active workers in the pool. - -### - A job can be dropped before work is begun - -Tunny has support for specified timeouts at the work request level, if this timeout is triggered whilst waiting for a worker to become available then the request is dropped entirely and no effort is wasted on the abandoned request. - -### - Backlogged jobs are FIFO, for now - -When a job arrives and all workers are occupied the waiting thread will lock at a select block whilst waiting to be assigned a worker. In practice this seems to create a FIFO queue, implying that this is how the implementation of Golang has dealt with select blocks, channels and multiple reading goroutines. - -However, I haven't found a guarantee of this behaviour in the Golang documentation, so I cannot guarantee that this will always be the case. diff --git a/vendor/github.com/Jeffail/tunny/tunny.go b/vendor/github.com/Jeffail/tunny/tunny.go deleted file mode 100644 index 96b10321a20..00000000000 --- a/vendor/github.com/Jeffail/tunny/tunny.go +++ /dev/null @@ -1,379 +0,0 @@ -/* -Copyright (c) 2014 Ashley Jeffs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -*/ - -// Package tunny implements a simple pool for maintaining independant worker goroutines. -package tunny - -import ( - "errors" - "expvar" - "reflect" - "strconv" - "sync" - "sync/atomic" - "time" -) - -// Errors that are used throughout the Tunny API. -var ( - ErrPoolAlreadyRunning = errors.New("the pool is already running") - ErrPoolNotRunning = errors.New("the pool is not running") - ErrJobNotFunc = errors.New("generic worker not given a func()") - ErrWorkerClosed = errors.New("worker was closed") - ErrJobTimedOut = errors.New("job request timed out") -) - -/* -TunnyWorker - The basic interface of a tunny worker. 
-*/ -type TunnyWorker interface { - - // Called for each job, expects the result to be returned synchronously - TunnyJob(interface{}) interface{} - - // Called after each job, this indicates whether the worker is ready for the next job. - // The default implementation is to return true always. If false is returned then the - // method is called every five milliseconds until either true is returned or the pool - // is closed. For efficiency you should have this call block until your worker is ready, - // otherwise you introduce a 5ms latency between jobs. - TunnyReady() bool -} - -/* -TunnyExtendedWorker - An optional interface that can be implemented if the worker needs -more control over its state. -*/ -type TunnyExtendedWorker interface { - - // Called when the pool is opened, this will be called before any jobs are sent. - TunnyInitialize() - - // Called when the pool is closed, this will be called after all jobs are completed. - TunnyTerminate() -} - -/* -TunnyInterruptable - An optional interface that can be implemented in order to allow the -worker to drop jobs when they are abandoned. -*/ -type TunnyInterruptable interface { - - // Called when the current job has been abandoned by the client. - TunnyInterrupt() -} - -/* -Default and very basic implementation of a tunny worker. This worker holds a closure which -is assigned at construction, and this closure is called on each job. -*/ -type tunnyDefaultWorker struct { - job *func(interface{}) interface{} -} - -func (worker *tunnyDefaultWorker) TunnyJob(data interface{}) interface{} { - return (*worker.job)(data) -} - -func (worker *tunnyDefaultWorker) TunnyReady() bool { - return true -} - -/* -WorkPool contains the structures and methods required to communicate with your pool, it must -be opened before sending work and closed when all jobs are completed. - -You may open and close a pool as many times as you wish, calling close is a blocking call that -guarantees all goroutines are stopped. -*/ -type WorkPool struct { - workers []*workerWrapper - selects []reflect.SelectCase - statusMutex sync.RWMutex - running uint32 - pendingAsyncJobs int32 -} - -func (pool *WorkPool) isRunning() bool { - return (atomic.LoadUint32(&pool.running) == 1) -} - -func (pool *WorkPool) setRunning(running bool) { - if running { - atomic.SwapUint32(&pool.running, 1) - } else { - atomic.SwapUint32(&pool.running, 0) - } -} - -/* -Open all channels and launch the background goroutines managed by the pool. -*/ -func (pool *WorkPool) Open() (*WorkPool, error) { - pool.statusMutex.Lock() - defer pool.statusMutex.Unlock() - - if !pool.isRunning() { - - pool.selects = make([]reflect.SelectCase, len(pool.workers)) - - for i, workerWrapper := range pool.workers { - workerWrapper.Open() - - pool.selects[i] = reflect.SelectCase{ - Dir: reflect.SelectRecv, - Chan: reflect.ValueOf(workerWrapper.readyChan), - } - } - - pool.setRunning(true) - return pool, nil - - } - return nil, ErrPoolAlreadyRunning -} - -/* -Close all channels and goroutines managed by the pool. -*/ -func (pool *WorkPool) Close() error { - pool.statusMutex.Lock() - defer pool.statusMutex.Unlock() - - if pool.isRunning() { - for _, workerWrapper := range pool.workers { - workerWrapper.Close() - } - for _, workerWrapper := range pool.workers { - workerWrapper.Join() - } - pool.setRunning(false) - return nil - } - return ErrPoolNotRunning -} - -/* -CreatePool - Creates a pool of workers, and takes a closure argument which is the action -to perform for each job. 
-*/ -func CreatePool(numWorkers int, job func(interface{}) interface{}) *WorkPool { - pool := WorkPool{running: 0} - - pool.workers = make([]*workerWrapper, numWorkers) - for i := range pool.workers { - newWorker := workerWrapper{ - worker: &(tunnyDefaultWorker{&job}), - } - pool.workers[i] = &newWorker - } - - return &pool -} - -/* -CreatePoolGeneric - Creates a pool of generic workers. When sending work to a pool of -generic workers you send a closure (func()) which is the job to perform. -*/ -func CreatePoolGeneric(numWorkers int) *WorkPool { - - return CreatePool(numWorkers, func(jobCall interface{}) interface{} { - if method, ok := jobCall.(func()); ok { - method() - return nil - } - return ErrJobNotFunc - }) - -} - -/* -CreateCustomPool - Creates a pool for an array of custom workers. The custom workers -must implement TunnyWorker, and may also optionally implement TunnyExtendedWorker and -TunnyInterruptable. -*/ -func CreateCustomPool(customWorkers []TunnyWorker) *WorkPool { - pool := WorkPool{running: 0} - - pool.workers = make([]*workerWrapper, len(customWorkers)) - for i := range pool.workers { - newWorker := workerWrapper{ - worker: customWorkers[i], - } - pool.workers[i] = &newWorker - } - - return &pool -} - -/* -SendWorkTimed - Send a job to a worker and return the result, this is a synchronous -call with a timeout. -*/ -func (pool *WorkPool) SendWorkTimed(milliTimeout time.Duration, jobData interface{}) (interface{}, error) { - pool.statusMutex.RLock() - defer pool.statusMutex.RUnlock() - - if pool.isRunning() { - before := time.Now() - - // Create a new time out timer - timeout := time.NewTimer(milliTimeout * time.Millisecond) - defer timeout.Stop() - - // Create new selectcase[] and add time out case - selectCases := append(pool.selects[:], reflect.SelectCase{ - Dir: reflect.SelectRecv, - Chan: reflect.ValueOf(timeout.C), - }) - - // Wait for workers, or time out - if chosen, _, ok := reflect.Select(selectCases); ok { - - // Check if the selected index is a worker, otherwise we timed out - if chosen < (len(selectCases) - 1) { - pool.workers[chosen].jobChan <- jobData - - timeoutRemain := time.NewTimer((milliTimeout * time.Millisecond) - time.Since(before)) - defer timeoutRemain.Stop() - - // Wait for response, or time out - select { - case data, open := <-pool.workers[chosen].outputChan: - if !open { - return nil, ErrWorkerClosed - } - return data, nil - case <-timeoutRemain.C: - /* If we time out here we also need to ensure that the output is still - * collected and that the worker can move on. Therefore, we fork the - * waiting process into a new goroutine. - */ - go func() { - pool.workers[chosen].Interrupt() - <-pool.workers[chosen].outputChan - }() - return nil, ErrJobTimedOut - } - } else { - return nil, ErrJobTimedOut - } - } else { - // This means the chosen channel was closed - return nil, ErrWorkerClosed - } - } else { - return nil, ErrPoolNotRunning - } -} - -/* -SendWorkTimedAsync - Send a timed job to a worker without blocking, and optionally -send the result to a receiving closure. You may set the closure to nil if no -further actions are required. 
-*/ -func (pool *WorkPool) SendWorkTimedAsync( - milliTimeout time.Duration, - jobData interface{}, - after func(interface{}, error), -) { - atomic.AddInt32(&pool.pendingAsyncJobs, 1) - go func() { - defer atomic.AddInt32(&pool.pendingAsyncJobs, -1) - result, err := pool.SendWorkTimed(milliTimeout, jobData) - if after != nil { - after(result, err) - } - }() -} - -/* -SendWork - Send a job to a worker and return the result, this is a synchronous call. -*/ -func (pool *WorkPool) SendWork(jobData interface{}) (interface{}, error) { - pool.statusMutex.RLock() - defer pool.statusMutex.RUnlock() - - if pool.isRunning() { - if chosen, _, ok := reflect.Select(pool.selects); ok && chosen >= 0 { - pool.workers[chosen].jobChan <- jobData - result, open := <-pool.workers[chosen].outputChan - - if !open { - return nil, ErrWorkerClosed - } - return result, nil - } - return nil, ErrWorkerClosed - } - return nil, ErrPoolNotRunning -} - -/* -SendWorkAsync - Send a job to a worker without blocking, and optionally send the -result to a receiving closure. You may set the closure to nil if no further actions -are required. -*/ -func (pool *WorkPool) SendWorkAsync(jobData interface{}, after func(interface{}, error)) { - atomic.AddInt32(&pool.pendingAsyncJobs, 1) - go func() { - defer atomic.AddInt32(&pool.pendingAsyncJobs, -1) - result, err := pool.SendWork(jobData) - if after != nil { - after(result, err) - } - }() -} - -/* -NumPendingAsyncJobs - Get the current count of async jobs either in flight, or waiting for a worker -*/ -func (pool *WorkPool) NumPendingAsyncJobs() int32 { - return atomic.LoadInt32(&pool.pendingAsyncJobs) -} - -/* -NumWorkers - Number of workers in the pool -*/ -func (pool *WorkPool) NumWorkers() int { - return len(pool.workers) -} - -type liveVarAccessor func() string - -func (a liveVarAccessor) String() string { - return a() -} - -/* -PublishExpvarMetrics - Publishes the NumWorkers and NumPendingAsyncJobs to expvars -*/ -func (pool *WorkPool) PublishExpvarMetrics(poolName string) { - ret := expvar.NewMap(poolName) - asyncJobsFn := func() string { - return strconv.FormatInt(int64(pool.NumPendingAsyncJobs()), 10) - } - numWorkersFn := func() string { - return strconv.FormatInt(int64(pool.NumWorkers()), 10) - } - ret.Set("pendingAsyncJobs", liveVarAccessor(asyncJobsFn)) - ret.Set("numWorkers", liveVarAccessor(numWorkersFn)) -} diff --git a/vendor/github.com/Jeffail/tunny/tunny_logo.png b/vendor/github.com/Jeffail/tunny/tunny_logo.png deleted file mode 100644 index 36028decfd1..00000000000 Binary files a/vendor/github.com/Jeffail/tunny/tunny_logo.png and /dev/null differ diff --git a/vendor/github.com/Jeffail/tunny/worker.go b/vendor/github.com/Jeffail/tunny/worker.go deleted file mode 100644 index 9f2cad8ad12..00000000000 --- a/vendor/github.com/Jeffail/tunny/worker.go +++ /dev/null @@ -1,110 +0,0 @@ -/* -Copyright (c) 2014 Ashley Jeffs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -*/ - -package tunny - -import ( - "sync/atomic" - "time" -) - -type workerWrapper struct { - readyChan chan int - jobChan chan interface{} - outputChan chan interface{} - poolOpen uint32 - worker TunnyWorker -} - -func (wrapper *workerWrapper) Loop() { - - // TODO: Configure? - tout := time.Duration(5) - - for !wrapper.worker.TunnyReady() { - // It's sad that we can't simply check if jobChan is closed here. - if atomic.LoadUint32(&wrapper.poolOpen) == 0 { - break - } - time.Sleep(tout * time.Millisecond) - } - - wrapper.readyChan <- 1 - - for data := range wrapper.jobChan { - wrapper.outputChan <- wrapper.worker.TunnyJob(data) - for !wrapper.worker.TunnyReady() { - if atomic.LoadUint32(&wrapper.poolOpen) == 0 { - break - } - time.Sleep(tout * time.Millisecond) - } - wrapper.readyChan <- 1 - } - - close(wrapper.readyChan) - close(wrapper.outputChan) - -} - -func (wrapper *workerWrapper) Open() { - if extWorker, ok := wrapper.worker.(TunnyExtendedWorker); ok { - extWorker.TunnyInitialize() - } - - wrapper.readyChan = make(chan int) - wrapper.jobChan = make(chan interface{}) - wrapper.outputChan = make(chan interface{}) - - atomic.SwapUint32(&wrapper.poolOpen, uint32(1)) - - go wrapper.Loop() -} - -// Follow this with Join(), otherwise terminate isn't called on the worker -func (wrapper *workerWrapper) Close() { - close(wrapper.jobChan) - - // Breaks the worker out of a Ready() -> false loop - atomic.SwapUint32(&wrapper.poolOpen, uint32(0)) -} - -func (wrapper *workerWrapper) Join() { - // Ensure that both the ready and output channels are closed - for { - _, readyOpen := <-wrapper.readyChan - _, outputOpen := <-wrapper.outputChan - if !readyOpen && !outputOpen { - break - } - } - - if extWorker, ok := wrapper.worker.(TunnyExtendedWorker); ok { - extWorker.TunnyTerminate() - } -} - -func (wrapper *workerWrapper) Interrupt() { - if extWorker, ok := wrapper.worker.(TunnyInterruptable); ok { - extWorker.TunnyInterrupt() - } -} diff --git a/vendor/github.com/Masterminds/goutils/.travis.yml b/vendor/github.com/Masterminds/goutils/.travis.yml deleted file mode 100644 index 4025e01ec4a..00000000000 --- a/vendor/github.com/Masterminds/goutils/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -language: go - -go: - - 1.6 - - 1.7 - - 1.8 - - tip - -script: - - go test -v - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/06e3328629952dabe3e0 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/goutils/CHANGELOG.md b/vendor/github.com/Masterminds/goutils/CHANGELOG.md deleted file mode 100644 index d700ec47f2b..00000000000 --- a/vendor/github.com/Masterminds/goutils/CHANGELOG.md +++ /dev/null @@ -1,8 +0,0 @@ -# 1.0.1 (2017-05-31) - -## Fixed -- #21: Fix generation of alphanumeric strings (thanks @dbarranco) - -# 1.0.0 (2014-04-30) - -- Initial release. 
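The tunny README removed above argues that Go's built-in concurrency primitives already cover the simple async worker case, with every worker pulling jobs ad hoc from a shared backlog. A rough sketch of that pattern, using only goroutines, channels and a WaitGroup (placeholder names, not code from this repository):

    package main

    import (
    	"fmt"
    	"sync"
    )

    // process stands in for the per-job work a TunnyJob callback would do.
    func process(job int) int { return job * 2 }

    func main() {
    	const numWorkers = 4

    	jobs := make(chan int)    // shared backlog: every worker receives from the same channel
    	results := make(chan int) // collected results
    	var wg sync.WaitGroup

    	// Fixed pool of workers, each picking up jobs ad hoc from the shared backlog.
    	for w := 0; w < numWorkers; w++ {
    		wg.Add(1)
    		go func() {
    			defer wg.Done()
    			for job := range jobs {
    				results <- process(job)
    			}
    		}()
    	}

    	// Close the results channel once every worker has drained the backlog.
    	go func() {
    		wg.Wait()
    		close(results)
    	}()

    	// Submit work, then close the backlog so the workers exit.
    	go func() {
    		for i := 0; i < 10; i++ {
    			jobs <- i
    		}
    		close(jobs)
    	}()

    	for r := range results {
    		fmt.Println(r)
    	}
    }

Because all workers receive from the same jobs channel, the backlog is shared by the whole pool and a slow worker does not block the others, which is the behaviour the removed README describes.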
diff --git a/vendor/github.com/Masterminds/goutils/LICENSE.txt b/vendor/github.com/Masterminds/goutils/LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/vendor/github.com/Masterminds/goutils/LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/Masterminds/goutils/README.md b/vendor/github.com/Masterminds/goutils/README.md deleted file mode 100644 index 163ffe72a82..00000000000 --- a/vendor/github.com/Masterminds/goutils/README.md +++ /dev/null @@ -1,70 +0,0 @@ -GoUtils -=========== -[![Stability: Maintenance](https://masterminds.github.io/stability/maintenance.svg)](https://masterminds.github.io/stability/maintenance.html) -[![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) [![Build Status](https://travis-ci.org/Masterminds/goutils.svg?branch=master)](https://travis-ci.org/Masterminds/goutils) [![Build status](https://ci.appveyor.com/api/projects/status/sc2b1ew0m7f0aiju?svg=true)](https://ci.appveyor.com/project/mattfarina/goutils) - - -GoUtils provides users with utility functions to manipulate strings in various ways. It is a Go implementation of some -string manipulation libraries of Java Apache Commons. GoUtils includes the following Java Apache Commons classes: -* WordUtils -* RandomStringUtils -* StringUtils (partial implementation) - -## Installation -If you have Go set up on your system, from the GOPATH directory within the command line/terminal, enter this: - - go get github.com/Masterminds/goutils - -If you do not have Go set up on your system, please follow the [Go installation directions from the documenation](http://golang.org/doc/install), and then follow the instructions above to install GoUtils. - - -## Documentation -GoUtils doc is available here: [![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) - - -## Usage -The code snippets below show examples of how to use GoUtils. Some functions return errors while others do not. The first instance below, which does not return an error, is the `Initials` function (located within the `wordutils.go` file). - - package main - - import ( - "fmt" - "github.com/Masterminds/goutils" - ) - - func main() { - - // EXAMPLE 1: A goutils function which returns no errors - fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF" - - } -Some functions return errors mainly due to illegal arguements used as parameters. The code example below illustrates how to deal with function that returns an error. In this instance, the function is the `Random` function (located within the `randomstringutils.go` file). - - package main - - import ( - "fmt" - "github.com/Masterminds/goutils" - ) - - func main() { - - // EXAMPLE 2: A goutils function which returns an error - rand1, err1 := goutils.Random (-1, 0, 0, true, true) - - if err1 != nil { - fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...) - } else { - fmt.Println(rand1) - } - - } - -## License -GoUtils is licensed under the Apache License, Version 2.0. Please check the LICENSE.txt file or visit http://www.apache.org/licenses/LICENSE-2.0 for a copy of the license. 
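The goutils usage examples above rely on goutils.Random and related helpers for random strings. A comparable generator can be sketched with nothing but the standard library's crypto/rand; randomAlphaNumeric is a hypothetical helper name, not an API from goutils or this repository:

    package main

    import (
    	"crypto/rand"
    	"fmt"
    	"math/big"
    )

    // randomAlphaNumeric returns a random alphanumeric string of length n,
    // drawing bytes from crypto/rand rather than a seeded math/rand source.
    func randomAlphaNumeric(n int) (string, error) {
    	if n < 0 {
    		return "", fmt.Errorf("requested length %d is less than 0", n)
    	}
    	const chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    	out := make([]byte, n)
    	for i := range out {
    		idx, err := rand.Int(rand.Reader, big.NewInt(int64(len(chars))))
    		if err != nil {
    			return "", err
    		}
    		out[i] = chars[int(idx.Int64())]
    	}
    	return string(out), nil
    }

    func main() {
    	s, err := randomAlphaNumeric(16)
    	if err != nil {
    		fmt.Println(err)
    		return
    	}
    	fmt.Println(s)
    }

As in the goutils example, an invalid length is reported as an error to the caller rather than causing a panic.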
- -## Issue Reporting -Make suggestions or report issues using the Git issue tracker: https://github.com/Masterminds/goutils/issues - -## Website -* [GoUtils webpage](http://Masterminds.github.io/goutils/) diff --git a/vendor/github.com/Masterminds/goutils/appveyor.yml b/vendor/github.com/Masterminds/goutils/appveyor.yml deleted file mode 100644 index 657564a8474..00000000000 --- a/vendor/github.com/Masterminds/goutils/appveyor.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\goutils -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -build: off - -install: - - go version - - go env - -test_script: - - go test -v - -deploy: off diff --git a/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go b/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go deleted file mode 100644 index 177dd865848..00000000000 --- a/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go +++ /dev/null @@ -1,251 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package goutils - -import ( - "crypto/rand" - "fmt" - "math" - "math/big" - "regexp" - "unicode" -) - -/* -CryptoRandomNonAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 to 2,147,483,647 (math.MaxInt32)). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomNonAlphaNumeric(count int) (string, error) { - return CryptoRandomAlphaNumericCustom(count, false, false) -} - -/* -CryptoRandomAscii creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomAscii(count int) (string, error) { - return CryptoRandom(count, 32, 127, false, false) -} - -/* -CryptoRandomNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomNumeric(count int) (string, error) { - return CryptoRandom(count, 0, 0, false, true) -} - -/* -CryptoRandomAlphabetic creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. 
- -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomAlphabetic(count int) (string, error) { - return CryptoRandom(count, 0, 0, true, false) -} - -/* -CryptoRandomAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomAlphaNumeric(count int) (string, error) { - if count == 0 { - return "", nil - } - RandomString, err := CryptoRandom(count, 0, 0, true, true) - if err != nil { - return "", fmt.Errorf("Error: %s", err) - } - match, err := regexp.MatchString("([0-9]+)", RandomString) - if err != nil { - panic(err) - } - - if !match { - //Get the position between 0 and the length of the string-1 to insert a random number - position := getCryptoRandomInt(count) - //Insert a random number between [0-9] in the position - RandomString = RandomString[:position] + string('0' + getCryptoRandomInt(10)) + RandomString[position + 1:] - return RandomString, err - } - return RandomString, err - -} - -/* -CryptoRandomAlphaNumericCustom creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. - -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) -*/ -func CryptoRandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) { - return CryptoRandom(count, 0, 0, letters, numbers) -} - -/* -CryptoRandom creates a random string based on a variety of options, using using golang's crypto/rand source of randomness. -If the parameters start and end are both 0, start and end are set to ' ' and 'z', the ASCII printable characters, will be used, -unless letters and numbers are both false, in which case, start and end are set to 0 and math.MaxInt32, respectively. -If chars is not nil, characters stored in chars that are between start and end are chosen. - -Parameters: - count - the length of random string to create - start - the position in set of chars (ASCII/Unicode int) to start at - end - the position in set of chars (ASCII/Unicode int) to end before - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars. 
- -Returns: - string - the random string - error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars) -*/ -func CryptoRandom(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) { - if count == 0 { - return "", nil - } else if count < 0 { - err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...") - return "", err - } - if chars != nil && len(chars) == 0 { - err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty") - return "", err - } - - if start == 0 && end == 0 { - if chars != nil { - end = len(chars) - } else { - if !letters && !numbers { - end = math.MaxInt32 - } else { - end = 'z' + 1 - start = ' ' - } - } - } else { - if end <= start { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start) - return "", err - } - - if chars != nil && end > len(chars) { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars)) - return "", err - } - } - - buffer := make([]rune, count) - gap := end - start - - // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319 - // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343 - - for count != 0 { - count-- - var ch rune - if chars == nil { - ch = rune(getCryptoRandomInt(gap) + int64(start)) - } else { - ch = chars[getCryptoRandomInt(gap) + int64(start)] - } - - if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers { - if ch >= 56320 && ch <= 57343 { // low surrogate range - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = ch - count-- - // Insert high surrogate - buffer[count] = rune(55296 + getCryptoRandomInt(128)) - } - } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial) - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = rune(56320 + getCryptoRandomInt(128)) - count-- - // Insert high surrogate - buffer[count] = ch - } - } else if ch >= 56192 && ch <= 56319 { - // private high surrogate, skip it - count++ - } else { - // not one of the surrogates* - buffer[count] = ch - } - } else { - count++ - } - } - return string(buffer), nil -} - -func getCryptoRandomInt(count int) int64 { - nBig, err := rand.Int(rand.Reader, big.NewInt(int64(count))) - if err != nil { - panic(err) - } - return nBig.Int64() -} diff --git a/vendor/github.com/Masterminds/goutils/randomstringutils.go b/vendor/github.com/Masterminds/goutils/randomstringutils.go deleted file mode 100644 index 1364e0cafdf..00000000000 --- a/vendor/github.com/Masterminds/goutils/randomstringutils.go +++ /dev/null @@ -1,268 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package goutils - -import ( - "fmt" - "math" - "math/rand" - "regexp" - "time" - "unicode" -) - -// RANDOM provides the time-based seed used to generate random numbers -var RANDOM = rand.New(rand.NewSource(time.Now().UnixNano())) - -/* -RandomNonAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 to 2,147,483,647 (math.MaxInt32)). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomNonAlphaNumeric(count int) (string, error) { - return RandomAlphaNumericCustom(count, false, false) -} - -/* -RandomAscii creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAscii(count int) (string, error) { - return Random(count, 32, 127, false, false) -} - -/* -RandomNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomNumeric(count int) (string, error) { - return Random(count, 0, 0, false, true) -} - -/* -RandomAlphabetic creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. - -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphabetic(count int) (string, error) { - return Random(count, 0, 0, true, false) -} - -/* -RandomAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphaNumeric(count int) (string, error) { - RandomString, err := Random(count, 0, 0, true, true) - if err != nil { - return "", fmt.Errorf("Error: %s", err) - } - match, err := regexp.MatchString("([0-9]+)", RandomString) - if err != nil { - panic(err) - } - - if !match { - //Get the position between 0 and the length of the string-1 to insert a random number - position := rand.Intn(count) - //Insert a random number between [0-9] in the position - RandomString = RandomString[:position] + string('0'+rand.Intn(10)) + RandomString[position+1:] - return RandomString, err - } - return RandomString, err - -} - -/* -RandomAlphaNumericCustom creates a random string whose length is the number of characters specified. 
-Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. - -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) { - return Random(count, 0, 0, letters, numbers) -} - -/* -Random creates a random string based on a variety of options, using default source of randomness. -This method has exactly the same semantics as RandomSeed(int, int, int, bool, bool, []char, *rand.Rand), but -instead of using an externally supplied source of randomness, it uses the internal *rand.Rand instance. - -Parameters: - count - the length of random string to create - start - the position in set of chars (ASCII/Unicode int) to start at - end - the position in set of chars (ASCII/Unicode int) to end before - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars. - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func Random(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) { - return RandomSeed(count, start, end, letters, numbers, chars, RANDOM) -} - -/* -RandomSeed creates a random string based on a variety of options, using supplied source of randomness. -If the parameters start and end are both 0, start and end are set to ' ' and 'z', the ASCII printable characters, will be used, -unless letters and numbers are both false, in which case, start and end are set to 0 and math.MaxInt32, respectively. -If chars is not nil, characters stored in chars that are between start and end are chosen. -This method accepts a user-supplied *rand.Rand instance to use as a source of randomness. By seeding a single *rand.Rand instance -with a fixed seed and using it for each call, the same random sequence of strings can be generated repeatedly and predictably. - -Parameters: - count - the length of random string to create - start - the position in set of chars (ASCII/Unicode decimals) to start at - end - the position in set of chars (ASCII/Unicode decimals) to end before - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars. - random - a source of randomness. 
- -Returns: - string - the random string - error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars) -*/ -func RandomSeed(count int, start int, end int, letters bool, numbers bool, chars []rune, random *rand.Rand) (string, error) { - - if count == 0 { - return "", nil - } else if count < 0 { - err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...") - return "", err - } - if chars != nil && len(chars) == 0 { - err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty") - return "", err - } - - if start == 0 && end == 0 { - if chars != nil { - end = len(chars) - } else { - if !letters && !numbers { - end = math.MaxInt32 - } else { - end = 'z' + 1 - start = ' ' - } - } - } else { - if end <= start { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start) - return "", err - } - - if chars != nil && end > len(chars) { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars)) - return "", err - } - } - - buffer := make([]rune, count) - gap := end - start - - // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319 - // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343 - - for count != 0 { - count-- - var ch rune - if chars == nil { - ch = rune(random.Intn(gap) + start) - } else { - ch = chars[random.Intn(gap)+start] - } - - if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers { - if ch >= 56320 && ch <= 57343 { // low surrogate range - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = ch - count-- - // Insert high surrogate - buffer[count] = rune(55296 + random.Intn(128)) - } - } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial) - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = rune(56320 + random.Intn(128)) - count-- - // Insert high surrogate - buffer[count] = ch - } - } else if ch >= 56192 && ch <= 56319 { - // private high surrogate, skip it - count++ - } else { - // not one of the surrogates* - buffer[count] = ch - } - } else { - count++ - } - } - return string(buffer), nil -} diff --git a/vendor/github.com/Masterminds/goutils/stringutils.go b/vendor/github.com/Masterminds/goutils/stringutils.go deleted file mode 100644 index 5037c4516ba..00000000000 --- a/vendor/github.com/Masterminds/goutils/stringutils.go +++ /dev/null @@ -1,224 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package goutils - -import ( - "bytes" - "fmt" - "strings" - "unicode" -) - -// Typically returned by functions where a searched item cannot be found -const INDEX_NOT_FOUND = -1 - -/* -Abbreviate abbreviates a string using ellipses. 
This will turn the string "Now is the time for all good men" into "Now is the time for..." - -Specifically, the algorithm is as follows: - - - If str is less than maxWidth characters long, return it. - - Else abbreviate it to (str[0:maxWidth - 3] + "..."). - - If maxWidth is less than 4, return an illegal argument error. - - In no case will it return a string of length greater than maxWidth. - -Parameters: - str - the string to check - maxWidth - maximum length of result string, must be at least 4 - -Returns: - string - abbreviated string - error - if the width is too small -*/ -func Abbreviate(str string, maxWidth int) (string, error) { - return AbbreviateFull(str, 0, maxWidth) -} - -/* -AbbreviateFull abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "...is the time for..." -This function works like Abbreviate(string, int), but allows you to specify a "left edge" offset. Note that this left edge is not -necessarily going to be the leftmost character in the result, or the first character following the ellipses, but it will appear -somewhere in the result. -In no case will it return a string of length greater than maxWidth. - -Parameters: - str - the string to check - offset - left edge of source string - maxWidth - maximum length of result string, must be at least 4 - -Returns: - string - abbreviated string - error - if the width is too small -*/ -func AbbreviateFull(str string, offset int, maxWidth int) (string, error) { - if str == "" { - return "", nil - } - if maxWidth < 4 { - err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width is 4") - return "", err - } - if len(str) <= maxWidth { - return str, nil - } - if offset > len(str) { - offset = len(str) - } - if len(str)-offset < (maxWidth - 3) { // 15 - 5 < 10 - 3 = 10 < 7 - offset = len(str) - (maxWidth - 3) - } - abrevMarker := "..." - if offset <= 4 { - return str[0:maxWidth-3] + abrevMarker, nil // str.substring(0, maxWidth - 3) + abrevMarker; - } - if maxWidth < 7 { - err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width with offset is 7") - return "", err - } - if (offset + maxWidth - 3) < len(str) { // 5 + (10-3) < 15 = 12 < 15 - abrevStr, _ := Abbreviate(str[offset:len(str)], (maxWidth - 3)) - return abrevMarker + abrevStr, nil // abrevMarker + abbreviate(str.substring(offset), maxWidth - 3); - } - return abrevMarker + str[(len(str)-(maxWidth-3)):len(str)], nil // abrevMarker + str.substring(str.length() - (maxWidth - 3)); -} - -/* -DeleteWhiteSpace deletes all whitespaces from a string as defined by unicode.IsSpace(rune). -It returns the string without whitespaces. - -Parameter: - str - the string to delete whitespace from, may be nil - -Returns: - the string without whitespaces -*/ -func DeleteWhiteSpace(str string) string { - if str == "" { - return str - } - sz := len(str) - var chs bytes.Buffer - count := 0 - for i := 0; i < sz; i++ { - ch := rune(str[i]) - if !unicode.IsSpace(ch) { - chs.WriteRune(ch) - count++ - } - } - if count == sz { - return str - } - return chs.String() -} - -/* -IndexOfDifference compares two strings, and returns the index at which the strings begin to differ. 
- -Parameters: - str1 - the first string - str2 - the second string - -Returns: - the index where str1 and str2 begin to differ; -1 if they are equal -*/ -func IndexOfDifference(str1 string, str2 string) int { - if str1 == str2 { - return INDEX_NOT_FOUND - } - if IsEmpty(str1) || IsEmpty(str2) { - return 0 - } - var i int - for i = 0; i < len(str1) && i < len(str2); i++ { - if rune(str1[i]) != rune(str2[i]) { - break - } - } - if i < len(str2) || i < len(str1) { - return i - } - return INDEX_NOT_FOUND -} - -/* -IsBlank checks if a string is whitespace or empty (""). Observe the following behavior: - - goutils.IsBlank("") = true - goutils.IsBlank(" ") = true - goutils.IsBlank("bob") = false - goutils.IsBlank(" bob ") = false - -Parameter: - str - the string to check - -Returns: - true - if the string is whitespace or empty ("") -*/ -func IsBlank(str string) bool { - strLen := len(str) - if str == "" || strLen == 0 { - return true - } - for i := 0; i < strLen; i++ { - if unicode.IsSpace(rune(str[i])) == false { - return false - } - } - return true -} - -/* -IndexOf returns the index of the first instance of sub in str, with the search beginning from the -index start point specified. -1 is returned if sub is not present in str. - -An empty string ("") will return -1 (INDEX_NOT_FOUND). A negative start position is treated as zero. -A start position greater than the string length returns -1. - -Parameters: - str - the string to check - sub - the substring to find - start - the start position; negative treated as zero - -Returns: - the first index where the sub string was found (always >= start) -*/ -func IndexOf(str string, sub string, start int) int { - - if start < 0 { - start = 0 - } - - if len(str) < start { - return INDEX_NOT_FOUND - } - - if IsEmpty(str) || IsEmpty(sub) { - return INDEX_NOT_FOUND - } - - partialIndex := strings.Index(str[start:len(str)], sub) - if partialIndex == -1 { - return INDEX_NOT_FOUND - } - return partialIndex + start -} - -// IsEmpty checks if a string is empty (""). Returns true if empty, and false otherwise. -func IsEmpty(str string) bool { - return len(str) == 0 -} diff --git a/vendor/github.com/Masterminds/goutils/wordutils.go b/vendor/github.com/Masterminds/goutils/wordutils.go deleted file mode 100644 index 034cad8e210..00000000000 --- a/vendor/github.com/Masterminds/goutils/wordutils.go +++ /dev/null @@ -1,357 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -/* -Package goutils provides utility functions to manipulate strings in various ways. -The code snippets below show examples of how to use goutils. Some functions return -errors while others do not, so usage would vary as a result. 
- -Example: - - package main - - import ( - "fmt" - "github.com/aokoli/goutils" - ) - - func main() { - - // EXAMPLE 1: A goutils function which returns no errors - fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF" - - - - // EXAMPLE 2: A goutils function which returns an error - rand1, err1 := goutils.Random (-1, 0, 0, true, true) - - if err1 != nil { - fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...) - } else { - fmt.Println(rand1) - } - } -*/ -package goutils - -import ( - "bytes" - "strings" - "unicode" -) - -// VERSION indicates the current version of goutils -const VERSION = "1.0.0" - -/* -Wrap wraps a single line of text, identifying words by ' '. -New lines will be separated by '\n'. Very long words, such as URLs will not be wrapped. -Leading spaces on a new line are stripped. Trailing spaces are not stripped. - -Parameters: - str - the string to be word wrapped - wrapLength - the column (a column can fit only one character) to wrap the words at, less than 1 is treated as 1 - -Returns: - a line with newlines inserted -*/ -func Wrap(str string, wrapLength int) string { - return WrapCustom(str, wrapLength, "", false) -} - -/* -WrapCustom wraps a single line of text, identifying words by ' '. -Leading spaces on a new line are stripped. Trailing spaces are not stripped. - -Parameters: - str - the string to be word wrapped - wrapLength - the column number (a column can fit only one character) to wrap the words at, less than 1 is treated as 1 - newLineStr - the string to insert for a new line, "" uses '\n' - wrapLongWords - true if long words (such as URLs) should be wrapped - -Returns: - a line with newlines inserted -*/ -func WrapCustom(str string, wrapLength int, newLineStr string, wrapLongWords bool) string { - - if str == "" { - return "" - } - if newLineStr == "" { - newLineStr = "\n" // TODO Assumes "\n" is seperator. Explore SystemUtils.LINE_SEPARATOR from Apache Commons - } - if wrapLength < 1 { - wrapLength = 1 - } - - inputLineLength := len(str) - offset := 0 - - var wrappedLine bytes.Buffer - - for inputLineLength-offset > wrapLength { - - if rune(str[offset]) == ' ' { - offset++ - continue - } - - end := wrapLength + offset + 1 - spaceToWrapAt := strings.LastIndex(str[offset:end], " ") + offset - - if spaceToWrapAt >= offset { - // normal word (not longer than wrapLength) - wrappedLine.WriteString(str[offset:spaceToWrapAt]) - wrappedLine.WriteString(newLineStr) - offset = spaceToWrapAt + 1 - - } else { - // long word or URL - if wrapLongWords { - end := wrapLength + offset - // long words are wrapped one line at a time - wrappedLine.WriteString(str[offset:end]) - wrappedLine.WriteString(newLineStr) - offset += wrapLength - } else { - // long words aren't wrapped, just extended beyond limit - end := wrapLength + offset - index := strings.IndexRune(str[end:len(str)], ' ') - if index == -1 { - wrappedLine.WriteString(str[offset:len(str)]) - offset = inputLineLength - } else { - spaceToWrapAt = index + end - wrappedLine.WriteString(str[offset:spaceToWrapAt]) - wrappedLine.WriteString(newLineStr) - offset = spaceToWrapAt + 1 - } - } - } - } - - wrappedLine.WriteString(str[offset:len(str)]) - - return wrappedLine.String() - -} - -/* -Capitalize capitalizes all the delimiter separated words in a string. Only the first letter of each word is changed. -To convert the rest of each word to lowercase at the same time, use CapitalizeFully(str string, delimiters ...rune). 
-The delimiters represent a set of characters understood to separate words. The first string character -and the first non-delimiter character after a delimiter will be capitalized. A "" input string returns "". -Capitalization uses the Unicode title case, normally equivalent to upper case. - -Parameters: - str - the string to capitalize - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - capitalized string -*/ -func Capitalize(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - - buffer := []rune(str) - capitalizeNext := true - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if isDelimiter(ch, delimiters...) { - capitalizeNext = true - } else if capitalizeNext { - buffer[i] = unicode.ToTitle(ch) - capitalizeNext = false - } - } - return string(buffer) - -} - -/* -CapitalizeFully converts all the delimiter separated words in a string into capitalized words, that is each word is made up of a -titlecase character and then a series of lowercase characters. The delimiters represent a set of characters understood -to separate words. The first string character and the first non-delimiter character after a delimiter will be capitalized. -Capitalization uses the Unicode title case, normally equivalent to upper case. - -Parameters: - str - the string to capitalize fully - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - capitalized string -*/ -func CapitalizeFully(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - str = strings.ToLower(str) - return Capitalize(str, delimiters...) -} - -/* -Uncapitalize uncapitalizes all the whitespace separated words in a string. Only the first letter of each word is changed. -The delimiters represent a set of characters understood to separate words. The first string character and the first non-delimiter -character after a delimiter will be uncapitalized. Whitespace is defined by unicode.IsSpace(char). - -Parameters: - str - the string to uncapitalize fully - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - uncapitalized string -*/ -func Uncapitalize(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - - buffer := []rune(str) - uncapitalizeNext := true // TODO Always makes capitalize/un apply to first char. - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if isDelimiter(ch, delimiters...) { - uncapitalizeNext = true - } else if uncapitalizeNext { - buffer[i] = unicode.ToLower(ch) - uncapitalizeNext = false - } - } - return string(buffer) -} - -/* -SwapCase swaps the case of a string using a word based algorithm. - -Conversion algorithm: - - Upper case character converts to Lower case - Title case character converts to Lower case - Lower case character after Whitespace or at start converts to Title case - Other Lower case character converts to Upper case - Whitespace is defined by unicode.IsSpace(char). 
- -Parameters: - str - the string to swap case - -Returns: - the changed string -*/ -func SwapCase(str string) string { - if str == "" { - return str - } - buffer := []rune(str) - - whitespace := true - - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if unicode.IsUpper(ch) { - buffer[i] = unicode.ToLower(ch) - whitespace = false - } else if unicode.IsTitle(ch) { - buffer[i] = unicode.ToLower(ch) - whitespace = false - } else if unicode.IsLower(ch) { - if whitespace { - buffer[i] = unicode.ToTitle(ch) - whitespace = false - } else { - buffer[i] = unicode.ToUpper(ch) - } - } else { - whitespace = unicode.IsSpace(ch) - } - } - return string(buffer) -} - -/* -Initials extracts the initial letters from each word in the string. The first letter of the string and all first -letters after the defined delimiters are returned as a new string. Their case is not changed. If the delimiters -parameter is excluded, then Whitespace is used. Whitespace is defined by unicode.IsSpacea(char). An empty delimiter array returns an empty string. - -Parameters: - str - the string to get initials from - delimiters - set of characters to determine words, exclusion of this parameter means whitespace would be delimeter -Returns: - string of initial letters -*/ -func Initials(str string, delimiters ...rune) string { - if str == "" { - return str - } - if delimiters != nil && len(delimiters) == 0 { - return "" - } - strLen := len(str) - var buf bytes.Buffer - lastWasGap := true - for i := 0; i < strLen; i++ { - ch := rune(str[i]) - - if isDelimiter(ch, delimiters...) { - lastWasGap = true - } else if lastWasGap { - buf.WriteRune(ch) - lastWasGap = false - } - } - return buf.String() -} - -// private function (lower case func name) -func isDelimiter(ch rune, delimiters ...rune) bool { - if delimiters == nil { - return unicode.IsSpace(ch) - } - for _, delimiter := range delimiters { - if ch == delimiter { - return true - } - } - return false -} diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml deleted file mode 100644 index 096369d44d9..00000000000 --- a/vendor/github.com/Masterminds/semver/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -language: go - -go: - - 1.6.x - - 1.7.x - - 1.8.x - - 1.9.x - - 1.10.x - - 1.11.x - - 1.12.x - - tip - -# Setting sudo access to false will let Travis CI use containers rather than -# VMs to run the tests. 
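For reference, the goutils word helpers shown in the hunk above (Wrap, CapitalizeFully, SwapCase, Initials) are plain string utilities with no external dependencies. Below is a minimal usage sketch; the import path is the one used in the package's own doc example, and the expected outputs simply follow the rules documented in the comments above.

```go
package main

import (
	"fmt"

	"github.com/aokoli/goutils" // import path taken from the package's own example
)

func main() {
	// Wrap at column 12; words longer than 12 characters are left unwrapped by default.
	fmt.Println(goutils.Wrap("the quick brown fox jumped over the lazy dog", 12))

	// CapitalizeFully lowercases each word first, then title-cases its first rune.
	fmt.Println(goutils.CapitalizeFully("tHe qUick bRoWn")) // "The Quick Brown"

	// SwapCase applies the word-based case-swapping rules described above.
	fmt.Println(goutils.SwapCase("The Quick Brown")) // "tHE qUICK bROWN"

	// Initials collects the first rune of each word, here using ' ' and '.' as delimiters.
	fmt.Println(goutils.Initials("john q. public", ' ', '.')) // "jqp"
}
```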
For more details see: -# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/ -# - http://docs.travis-ci.com/user/workers/standard-infrastructure/ -sudo: false - -script: - - make setup - - make test - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/06e3328629952dabe3e0 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md deleted file mode 100644 index e405c9a84d9..00000000000 --- a/vendor/github.com/Masterminds/semver/CHANGELOG.md +++ /dev/null @@ -1,109 +0,0 @@ -# 1.5.0 (2019-09-11) - -## Added - -- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c) - -## Changed - -- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil) -- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil) -- #72: Adding docs comment pointing to vert for a cli -- #71: Update the docs on pre-release comparator handling -- #89: Test with new go versions (thanks @thedevsaddam) -- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll) - -## Fixed - -- #78: Fix unchecked error in example code (thanks @ravron) -- #70: Fix the handling of pre-releases and the 0.0.0 release edge case -- #97: Fixed copyright file for proper display on GitHub -- #107: Fix handling prerelease when sorting alphanum and num -- #109: Fixed where Validate sometimes returns wrong message on error - -# 1.4.2 (2018-04-10) - -## Changed -- #72: Updated the docs to point to vert for a console appliaction -- #71: Update the docs on pre-release comparator handling - -## Fixed -- #70: Fix the handling of pre-releases and the 0.0.0 release edge case - -# 1.4.1 (2018-04-02) - -## Fixed -- Fixed #64: Fix pre-release precedence issue (thanks @uudashr) - -# 1.4.0 (2017-10-04) - -## Changed -- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill) - -# 1.3.1 (2017-07-10) - -## Fixed -- Fixed #57: number comparisons in prerelease sometimes inaccurate - -# 1.3.0 (2017-05-02) - -## Added -- #45: Added json (un)marshaling support (thanks @mh-cbon) -- Stability marker. See https://masterminds.github.io/stability/ - -## Fixed -- #51: Fix handling of single digit tilde constraint (thanks @dgodd) - -## Changed -- #55: The godoc icon moved from png to svg - -# 1.2.3 (2017-04-03) - -## Fixed -- #46: Fixed 0.x.x and 0.0.x in constraints being treated as * - -# Release 1.2.2 (2016-12-13) - -## Fixed -- #34: Fixed issue where hyphen range was not working with pre-release parsing. - -# Release 1.2.1 (2016-11-28) - -## Fixed -- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha" - properly. - -# Release 1.2.0 (2016-11-04) - -## Added -- #20: Added MustParse function for versions (thanks @adamreese) -- #15: Added increment methods on versions (thanks @mh-cbon) - -## Fixed -- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and - might not satisfy the intended compatibility. The change here ignores pre-releases - on constraint checks (e.g., ~ or ^) when a pre-release is not part of the - constraint. For example, `^1.2.3` will ignore pre-releases while - `^1.2.3-alpha` will include them. 
- -# Release 1.1.1 (2016-06-30) - -## Changed -- Issue #9: Speed up version comparison performance (thanks @sdboyer) -- Issue #8: Added benchmarks (thanks @sdboyer) -- Updated Go Report Card URL to new location -- Updated Readme to add code snippet formatting (thanks @mh-cbon) -- Updating tagging to v[SemVer] structure for compatibility with other tools. - -# Release 1.1.0 (2016-03-11) - -- Issue #2: Implemented validation to provide reasons a versions failed a - constraint. - -# Release 1.0.1 (2015-12-31) - -- Fixed #1: * constraint failing on valid versions. - -# Release 1.0.0 (2015-10-20) - -- Initial release diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt deleted file mode 100644 index 9ff7da9c48b..00000000000 --- a/vendor/github.com/Masterminds/semver/LICENSE.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014-2019, Matt Butcher and Matt Farina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile deleted file mode 100644 index a7a1b4e36de..00000000000 --- a/vendor/github.com/Masterminds/semver/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -.PHONY: setup -setup: - go get -u gopkg.in/alecthomas/gometalinter.v1 - gometalinter.v1 --install - -.PHONY: test -test: validate lint - @echo "==> Running tests" - go test -v - -.PHONY: validate -validate: - @echo "==> Running static validations" - @gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1 - -.PHONY: lint -lint: - @echo "==> Running linters" - @gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... || : diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md deleted file mode 100644 index 1b52d2f4362..00000000000 --- a/vendor/github.com/Masterminds/semver/README.md +++ /dev/null @@ -1,194 +0,0 @@ -# SemVer - -The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. 
Specifically it provides the ability to: - -* Parse semantic versions -* Sort semantic versions -* Check if a semantic version fits within a set of constraints -* Optionally work with a `v` prefix - -[![Stability: -Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html) -[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver) - -If you are looking for a command line tool for version comparisons please see -[vert](https://github.com/Masterminds/vert) which uses this library. - -## Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - -```go - v, err := semver.NewVersion("1.2.3-beta.1+build345") -``` - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the [documentation](https://godoc.org/github.com/Masterminds/semver). - -## Sorting Semantic Versions - -A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/) -package from the standard library. For example, - -```go - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) -``` - -## Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - -```go - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) -``` - -## Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - -* `=`: equal (aliased to no operator) -* `!=`: not equal -* `>`: greater than -* `<`: less than -* `>=`: greater than or equal to -* `<=`: less than or equal to - -## Working With Pre-release Versions - -Pre-releases, for those not familiar with them, are used for software releases -prior to stable or generally available releases. Examples of pre-releases include -development, alpha, beta, and release candidate releases. A pre-release may be -a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the -order of precidence, pre-releases come before their associated releases. 
In this -example `1.2.3-beta.1 < 1.2.3`. - -According to the Semantic Version specification pre-releases may not be -API compliant with their release counterpart. It says, - -> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version. - -SemVer comparisons without a pre-release comparator will skip pre-release versions. -For example, `>=1.2.3` will skip pre-releases when looking at a list of releases -while `>=1.2.3-0` will evaluate and find pre-releases. - -The reason for the `0` as a pre-release version in the example comparison is -because pre-releases can only contain ASCII alphanumerics and hyphens (along with -`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/)) - -Understanding ASCII sort ordering is important because A-Z comes before a-z. That -means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case -sensitivity doesn't apply here. This is due to ASCII sort ordering which is what -the spec specifies. - -## Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. -These look like: - -* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` -* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -## Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - -* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `>= 1.2.x` is equivalent to `>= 1.2.0` -* `<= 2.x` is equivalent to `< 3` -* `*` is equivalent to `>= 0.0.0` - -## Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - -* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` -* `~1` is equivalent to `>= 1, < 2` -* `~2.3` is equivalent to `>= 2.3, < 2.4` -* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `~1.x` is equivalent to `>= 1, < 2` - -## Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. For example, - -* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` -* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0` -* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` -* `^2.3` is equivalent to `>= 2.3, < 3` -* `^2.x` is equivalent to `>= 2.0.0, < 3` - -# Validation - -In addition to testing a version against a constraint, a version can be validated -against a constraint. When validation fails a slice of errors containing why a -version didn't meet the constraint is returned. For example, - -```go - c, err := semver.NewConstraint("<= 1.2.3, >= 1.4") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - - // Validate a version against a constraint. - a, msgs := c.Validate(v) - // a is false - for _, m := range msgs { - fmt.Println(m) - - // Loops over the errors which would read - // "1.3 is greater than 1.2.3" - // "1.3 is less than 1.4" - } -``` - -# Fuzzing - - [dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing. - -1. 
`go-fuzz-build` -2. `go-fuzz -workdir=fuzz` - -# Contribute - -If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) -or [create a pull request](https://github.com/Masterminds/semver/pulls). diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml deleted file mode 100644 index b2778df15a4..00000000000 --- a/vendor/github.com/Masterminds/semver/appveyor.yml +++ /dev/null @@ -1,44 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\semver -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -install: - - go version - - go env - - go get -u gopkg.in/alecthomas/gometalinter.v1 - - set PATH=%PATH%;%GOPATH%\bin - - gometalinter.v1.exe --install - -build_script: - - go install -v ./... - -test_script: - - "gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1" - - "gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... || :" - - go test -v - -deploy: off diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go deleted file mode 100644 index a78235895fd..00000000000 --- a/vendor/github.com/Masterminds/semver/collection.go +++ /dev/null @@ -1,24 +0,0 @@ -package semver - -// Collection is a collection of Version instances and implements the sort -// interface. See the sort package for more details. -// https://golang.org/pkg/sort/ -type Collection []*Version - -// Len returns the length of a collection. The number of Version instances -// on the slice. -func (c Collection) Len() int { - return len(c) -} - -// Less is needed for the sort interface to compare two Version objects on the -// slice. If checks if one is less than the other. -func (c Collection) Less(i, j int) bool { - return c[i].LessThan(c[j]) -} - -// Swap is needed for the sort interface to replace the Version objects -// at two different positions in the slice. -func (c Collection) Swap(i, j int) { - c[i], c[j] = c[j], c[i] -} diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go deleted file mode 100644 index b94b93413f3..00000000000 --- a/vendor/github.com/Masterminds/semver/constraints.go +++ /dev/null @@ -1,423 +0,0 @@ -package semver - -import ( - "errors" - "fmt" - "regexp" - "strings" -) - -// Constraints is one or more constraint that a semantic version can be -// checked against. -type Constraints struct { - constraints [][]*constraint -} - -// NewConstraint returns a Constraints instance that a Version instance can -// be checked against. If there is a parse error it will be returned. -func NewConstraint(c string) (*Constraints, error) { - - // Rewrite - ranges into a comparison operation. - c = rewriteRange(c) - - ors := strings.Split(c, "||") - or := make([][]*constraint, len(ors)) - for k, v := range ors { - cs := strings.Split(v, ",") - result := make([]*constraint, len(cs)) - for i, s := range cs { - pc, err := parseConstraint(s) - if err != nil { - return nil, err - } - - result[i] = pc - } - or[k] = result - } - - o := &Constraints{constraints: or} - return o, nil -} - -// Check tests if a version satisfies the constraints. 
-func (cs Constraints) Check(v *Version) bool { - // loop over the ORs and check the inner ANDs - for _, o := range cs.constraints { - joy := true - for _, c := range o { - if !c.check(v) { - joy = false - break - } - } - - if joy { - return true - } - } - - return false -} - -// Validate checks if a version satisfies a constraint. If not a slice of -// reasons for the failure are returned in addition to a bool. -func (cs Constraints) Validate(v *Version) (bool, []error) { - // loop over the ORs and check the inner ANDs - var e []error - - // Capture the prerelease message only once. When it happens the first time - // this var is marked - var prerelesase bool - for _, o := range cs.constraints { - joy := true - for _, c := range o { - // Before running the check handle the case there the version is - // a prerelease and the check is not searching for prereleases. - if c.con.pre == "" && v.pre != "" { - if !prerelesase { - em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) - e = append(e, em) - prerelesase = true - } - joy = false - - } else { - - if !c.check(v) { - em := fmt.Errorf(c.msg, v, c.orig) - e = append(e, em) - joy = false - } - } - } - - if joy { - return true, []error{} - } - } - - return false, e -} - -var constraintOps map[string]cfunc -var constraintMsg map[string]string -var constraintRegex *regexp.Regexp - -func init() { - constraintOps = map[string]cfunc{ - "": constraintTildeOrEqual, - "=": constraintTildeOrEqual, - "!=": constraintNotEqual, - ">": constraintGreaterThan, - "<": constraintLessThan, - ">=": constraintGreaterThanEqual, - "=>": constraintGreaterThanEqual, - "<=": constraintLessThanEqual, - "=<": constraintLessThanEqual, - "~": constraintTilde, - "~>": constraintTilde, - "^": constraintCaret, - } - - constraintMsg = map[string]string{ - "": "%s is not equal to %s", - "=": "%s is not equal to %s", - "!=": "%s is equal to %s", - ">": "%s is less than or equal to %s", - "<": "%s is greater than or equal to %s", - ">=": "%s is less than %s", - "=>": "%s is less than %s", - "<=": "%s is greater than %s", - "=<": "%s is greater than %s", - "~": "%s does not have same major and minor version as %s", - "~>": "%s does not have same major and minor version as %s", - "^": "%s does not have same major version as %s", - } - - ops := make([]string, 0, len(constraintOps)) - for k := range constraintOps { - ops = append(ops, regexp.QuoteMeta(k)) - } - - constraintRegex = regexp.MustCompile(fmt.Sprintf( - `^\s*(%s)\s*(%s)\s*$`, - strings.Join(ops, "|"), - cvRegex)) - - constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( - `\s*(%s)\s+-\s+(%s)\s*`, - cvRegex, cvRegex)) -} - -// An individual constraint -type constraint struct { - // The callback function for the restraint. It performs the logic for - // the constraint. - function cfunc - - msg string - - // The version used in the constraint check. For example, if a constraint - // is '<= 2.0.0' the con a version instance representing 2.0.0. 
- con *Version - - // The original parsed version (e.g., 4.x from != 4.x) - orig string - - // When an x is used as part of the version (e.g., 1.x) - minorDirty bool - dirty bool - patchDirty bool -} - -// Check if a version meets the constraint -func (c *constraint) check(v *Version) bool { - return c.function(v, c) -} - -type cfunc func(v *Version, c *constraint) bool - -func parseConstraint(c string) (*constraint, error) { - m := constraintRegex.FindStringSubmatch(c) - if m == nil { - return nil, fmt.Errorf("improper constraint: %s", c) - } - - ver := m[2] - orig := ver - minorDirty := false - patchDirty := false - dirty := false - if isX(m[3]) { - ver = "0.0.0" - dirty = true - } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" { - minorDirty = true - dirty = true - ver = fmt.Sprintf("%s.0.0%s", m[3], m[6]) - } else if isX(strings.TrimPrefix(m[5], ".")) { - dirty = true - patchDirty = true - ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6]) - } - - con, err := NewVersion(ver) - if err != nil { - - // The constraintRegex should catch any regex parsing errors. So, - // we should never get here. - return nil, errors.New("constraint Parser Error") - } - - cs := &constraint{ - function: constraintOps[m[1]], - msg: constraintMsg[m[1]], - con: con, - orig: orig, - minorDirty: minorDirty, - patchDirty: patchDirty, - dirty: dirty, - } - return cs, nil -} - -// Constraint functions -func constraintNotEqual(v *Version, c *constraint) bool { - if c.dirty { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.con.Major() != v.Major() { - return true - } - if c.con.Minor() != v.Minor() && !c.minorDirty { - return true - } else if c.minorDirty { - return false - } - - return false - } - - return !v.Equal(c.con) -} - -func constraintGreaterThan(v *Version, c *constraint) bool { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) == 1 -} - -func constraintLessThan(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) < 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -func constraintGreaterThanEqual(v *Version, c *constraint) bool { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) >= 0 -} - -func constraintLessThanEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. 
- if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) <= 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// ~*, ~>* --> >= 0.0.0 (any) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 -func constraintTilde(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - // ~0.0.0 is a special case where all constraints are accepted. It's - // equivalent to >= 0.0.0. - if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 && - !c.minorDirty && !c.patchDirty { - return true - } - - if v.Major() != c.con.Major() { - return false - } - - if v.Minor() != c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// When there is a .x (dirty) status it automatically opts in to ~. Otherwise -// it's a straight = -func constraintTildeOrEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.dirty { - c.msg = constraintMsg["~"] - return constraintTilde(v, c) - } - - return v.Equal(c.con) -} - -// ^* --> (any) -// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0 -// ^1.2.3 --> >=1.2.3, <2.0.0 -// ^1.2.0 --> >=1.2.0, <2.0.0 -func constraintCaret(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - if v.Major() != c.con.Major() { - return false - } - - return true -} - -var constraintRangeRegex *regexp.Regexp - -const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -func isX(x string) bool { - switch x { - case "x", "*", "X": - return true - default: - return false - } -} - -func rewriteRange(i string) string { - m := constraintRangeRegex.FindAllStringSubmatch(i, -1) - if m == nil { - return i - } - o := i - for _, v := range m { - t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) - o = strings.Replace(o, v[0], t, 1) - } - - return o -} diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go deleted file mode 100644 index 6a6c24c6d6e..00000000000 --- a/vendor/github.com/Masterminds/semver/doc.go +++ /dev/null @@ -1,115 +0,0 @@ -/* -Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go. 
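The tilde, caret and hyphen-range rules implemented by constraintTilde, constraintCaret and rewriteRange above reduce to the equivalences listed in their comments. The following is a small sketch of how they behave through the public NewConstraint/Check API; the expected results follow directly from those equivalences.

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver"
)

func main() {
	v := semver.MustParse("1.4.2")

	for _, expr := range []string{
		"~1.4",            // >= 1.4, < 1.5                       -> true for 1.4.2
		"^1.2.3",          // >= 1.2.3, < 2.0.0                   -> true
		"1.2 - 1.4.5",     // rewritten by rewriteRange to ">= 1.2, <= 1.4.5" -> true
		">= 1.5 || < 1.0", // OR of comma-separated AND groups    -> false
	} {
		c, err := semver.NewConstraint(expr)
		if err != nil {
			fmt.Println("bad constraint:", expr, err)
			continue
		}
		fmt.Printf("%-15s %v\n", expr, c.Check(v))
	}
}
```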
- -Specifically it provides the ability to: - - * Parse semantic versions - * Sort semantic versions - * Check if a semantic version fits within a set of constraints - * Optionally work with a `v` prefix - -Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - - v, err := semver.NewVersion("1.2.3-beta.1+build345") - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the documentation at https://godoc.org/github.com/Masterminds/semver. - -Sorting Semantic Versions - -A set of versions can be sorted using the `sort` package from the standard library. -For example, - - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) - -Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, err := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) - -Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - - * `=`: equal (aliased to no operator) - * `!=`: not equal - * `>`: greater than - * `<`: less than - * `>=`: greater than or equal to - * `<=`: less than or equal to - -Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. -These look like: - - * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` - * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - - * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `>= 1.2.x` is equivalent to `>= 1.2.0` - * `<= 2.x` is equivalent to `<= 3` - * `*` is equivalent to `>= 0.0.0` - -Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - - * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` - * `~1` is equivalent to `>= 1, < 2` - * `~2.3` is equivalent to `>= 2.3, < 2.4` - * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `~1.x` is equivalent to `>= 1, < 2` - -Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. 
For example, - - * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` - * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` - * `^2.3` is equivalent to `>= 2.3, < 3` - * `^2.x` is equivalent to `>= 2.0.0, < 3` -*/ -package semver diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go deleted file mode 100644 index 400d4f93412..00000000000 --- a/vendor/github.com/Masterminds/semver/version.go +++ /dev/null @@ -1,425 +0,0 @@ -package semver - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "regexp" - "strconv" - "strings" -) - -// The compiled version of the regex created at init() is cached here so it -// only needs to be created once. -var versionRegex *regexp.Regexp -var validPrereleaseRegex *regexp.Regexp - -var ( - // ErrInvalidSemVer is returned a version is found to be invalid when - // being parsed. - ErrInvalidSemVer = errors.New("Invalid Semantic Version") - - // ErrInvalidMetadata is returned when the metadata is an invalid format - ErrInvalidMetadata = errors.New("Invalid Metadata string") - - // ErrInvalidPrerelease is returned when the pre-release is an invalid format - ErrInvalidPrerelease = errors.New("Invalid Prerelease string") -) - -// SemVerRegex is the regular expression used to parse a semantic version. -const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -// ValidPrerelease is the regular expression which validates -// both prerelease and metadata values. -const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$` - -// Version represents a single semantic version. -type Version struct { - major, minor, patch int64 - pre string - metadata string - original string -} - -func init() { - versionRegex = regexp.MustCompile("^" + SemVerRegex + "$") - validPrereleaseRegex = regexp.MustCompile(ValidPrerelease) -} - -// NewVersion parses a given version and returns an instance of Version or -// an error if unable to parse the version. -func NewVersion(v string) (*Version, error) { - m := versionRegex.FindStringSubmatch(v) - if m == nil { - return nil, ErrInvalidSemVer - } - - sv := &Version{ - metadata: m[8], - pre: m[5], - original: v, - } - - var temp int64 - temp, err := strconv.ParseInt(m[1], 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.major = temp - - if m[2] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.minor = temp - } else { - sv.minor = 0 - } - - if m[3] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.patch = temp - } else { - sv.patch = 0 - } - - return sv, nil -} - -// MustParse parses a given version and panics on error. -func MustParse(v string) *Version { - sv, err := NewVersion(v) - if err != nil { - panic(err) - } - return sv -} - -// String converts a Version object to a string. -// Note, if the original version contained a leading v this version will not. -// See the Original() method to retrieve the original value. Semantic Versions -// don't contain a leading v per the spec. Instead it's optional on -// implementation. 
-func (v *Version) String() string { - var buf bytes.Buffer - - fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch) - if v.pre != "" { - fmt.Fprintf(&buf, "-%s", v.pre) - } - if v.metadata != "" { - fmt.Fprintf(&buf, "+%s", v.metadata) - } - - return buf.String() -} - -// Original returns the original value passed in to be parsed. -func (v *Version) Original() string { - return v.original -} - -// Major returns the major version. -func (v *Version) Major() int64 { - return v.major -} - -// Minor returns the minor version. -func (v *Version) Minor() int64 { - return v.minor -} - -// Patch returns the patch version. -func (v *Version) Patch() int64 { - return v.patch -} - -// Prerelease returns the pre-release version. -func (v *Version) Prerelease() string { - return v.pre -} - -// Metadata returns the metadata on the version. -func (v *Version) Metadata() string { - return v.metadata -} - -// originalVPrefix returns the original 'v' prefix if any. -func (v *Version) originalVPrefix() string { - - // Note, only lowercase v is supported as a prefix by the parser. - if v.original != "" && v.original[:1] == "v" { - return v.original[:1] - } - return "" -} - -// IncPatch produces the next patch version. -// If the current version does not have prerelease/metadata information, -// it unsets metadata and prerelease values, increments patch number. -// If the current version has any of prerelease or metadata information, -// it unsets both values and keeps curent patch value -func (v Version) IncPatch() Version { - vNext := v - // according to http://semver.org/#spec-item-9 - // Pre-release versions have a lower precedence than the associated normal version. - // according to http://semver.org/#spec-item-10 - // Build metadata SHOULD be ignored when determining version precedence. - if v.pre != "" { - vNext.metadata = "" - vNext.pre = "" - } else { - vNext.metadata = "" - vNext.pre = "" - vNext.patch = v.patch + 1 - } - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMinor produces the next minor version. -// Sets patch to 0. -// Increments minor number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMinor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = v.minor + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMajor produces the next major version. -// Sets patch to 0. -// Sets minor to 0. -// Increments major number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMajor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = 0 - vNext.major = v.major + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// SetPrerelease defines the prerelease value. -// Value must not include the required 'hypen' prefix. -func (v Version) SetPrerelease(prerelease string) (Version, error) { - vNext := v - if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) { - return vNext, ErrInvalidPrerelease - } - vNext.pre = prerelease - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// SetMetadata defines metadata value. -// Value must not include the required 'plus' prefix. 
-func (v Version) SetMetadata(metadata string) (Version, error) { - vNext := v - if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) { - return vNext, ErrInvalidMetadata - } - vNext.metadata = metadata - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// LessThan tests if one version is less than another one. -func (v *Version) LessThan(o *Version) bool { - return v.Compare(o) < 0 -} - -// GreaterThan tests if one version is greater than another one. -func (v *Version) GreaterThan(o *Version) bool { - return v.Compare(o) > 0 -} - -// Equal tests if two versions are equal to each other. -// Note, versions can be equal with different metadata since metadata -// is not considered part of the comparable version. -func (v *Version) Equal(o *Version) bool { - return v.Compare(o) == 0 -} - -// Compare compares this version to another one. It returns -1, 0, or 1 if -// the version smaller, equal, or larger than the other version. -// -// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is -// lower than the version without a prerelease. -func (v *Version) Compare(o *Version) int { - // Compare the major, minor, and patch version for differences. If a - // difference is found return the comparison. - if d := compareSegment(v.Major(), o.Major()); d != 0 { - return d - } - if d := compareSegment(v.Minor(), o.Minor()); d != 0 { - return d - } - if d := compareSegment(v.Patch(), o.Patch()); d != 0 { - return d - } - - // At this point the major, minor, and patch versions are the same. - ps := v.pre - po := o.Prerelease() - - if ps == "" && po == "" { - return 0 - } - if ps == "" { - return 1 - } - if po == "" { - return -1 - } - - return comparePrerelease(ps, po) -} - -// UnmarshalJSON implements JSON.Unmarshaler interface. -func (v *Version) UnmarshalJSON(b []byte) error { - var s string - if err := json.Unmarshal(b, &s); err != nil { - return err - } - temp, err := NewVersion(s) - if err != nil { - return err - } - v.major = temp.major - v.minor = temp.minor - v.patch = temp.patch - v.pre = temp.pre - v.metadata = temp.metadata - v.original = temp.original - temp = nil - return nil -} - -// MarshalJSON implements JSON.Marshaler interface. -func (v *Version) MarshalJSON() ([]byte, error) { - return json.Marshal(v.String()) -} - -func compareSegment(v, o int64) int { - if v < o { - return -1 - } - if v > o { - return 1 - } - - return 0 -} - -func comparePrerelease(v, o string) int { - - // split the prelease versions by their part. The separator, per the spec, - // is a . - sparts := strings.Split(v, ".") - oparts := strings.Split(o, ".") - - // Find the longer length of the parts to know how many loop iterations to - // go through. - slen := len(sparts) - olen := len(oparts) - - l := slen - if olen > slen { - l = olen - } - - // Iterate over each part of the prereleases to compare the differences. - for i := 0; i < l; i++ { - // Since the lentgh of the parts can be different we need to create - // a placeholder. This is to avoid out of bounds issues. - stemp := "" - if i < slen { - stemp = sparts[i] - } - - otemp := "" - if i < olen { - otemp = oparts[i] - } - - d := comparePrePart(stemp, otemp) - if d != 0 { - return d - } - } - - // Reaching here means two versions are of equal value but have different - // metadata (the part following a +). They are not identical in string form - // but the version comparison finds them to be equal. 
- return 0 -} - -func comparePrePart(s, o string) int { - // Fastpath if they are equal - if s == o { - return 0 - } - - // When s or o are empty we can use the other in an attempt to determine - // the response. - if s == "" { - if o != "" { - return -1 - } - return 1 - } - - if o == "" { - if s != "" { - return 1 - } - return -1 - } - - // When comparing strings "99" is greater than "103". To handle - // cases like this we need to detect numbers and compare them. According - // to the semver spec, numbers are always positive. If there is a - at the - // start like -99 this is to be evaluated as an alphanum. numbers always - // have precedence over alphanum. Parsing as Uints because negative numbers - // are ignored. - - oi, n1 := strconv.ParseUint(o, 10, 64) - si, n2 := strconv.ParseUint(s, 10, 64) - - // The case where both are strings compare the strings - if n1 != nil && n2 != nil { - if s > o { - return 1 - } - return -1 - } else if n1 != nil { - // o is a string and s is a number - return -1 - } else if n2 != nil { - // s is a string and o is a number - return 1 - } - // Both are numbers - if si > oi { - return 1 - } - return -1 - -} diff --git a/vendor/github.com/Masterminds/semver/version_fuzz.go b/vendor/github.com/Masterminds/semver/version_fuzz.go deleted file mode 100644 index b42bcd62b95..00000000000 --- a/vendor/github.com/Masterminds/semver/version_fuzz.go +++ /dev/null @@ -1,10 +0,0 @@ -// +build gofuzz - -package semver - -func Fuzz(data []byte) int { - if _, err := NewVersion(string(data)); err != nil { - return 0 - } - return 1 -} diff --git a/vendor/github.com/Masterminds/sprig/.gitignore b/vendor/github.com/Masterminds/sprig/.gitignore deleted file mode 100644 index 5e3002f88f5..00000000000 --- a/vendor/github.com/Masterminds/sprig/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -vendor/ -/.glide diff --git a/vendor/github.com/Masterminds/sprig/.travis.yml b/vendor/github.com/Masterminds/sprig/.travis.yml deleted file mode 100644 index b9da8b825bb..00000000000 --- a/vendor/github.com/Masterminds/sprig/.travis.yml +++ /dev/null @@ -1,26 +0,0 @@ -language: go - -go: - - 1.9.x - - 1.10.x - - 1.11.x - - 1.12.x - - 1.13.x - - tip - -# Setting sudo access to false will let Travis CI use containers rather than -# VMs to run the tests. 
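The Version methods in version.go above pin down the comparison and increment semantics. Here is a brief sketch of the behaviour they guarantee; the expected values follow from Compare, comparePrePart, IncMinor and MarshalJSON as implemented above.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/Masterminds/semver"
)

func main() {
	release := semver.MustParse("1.2.3")
	pre := semver.MustParse("1.2.3-beta.1")

	// A prerelease sorts below its associated release.
	fmt.Println(pre.Compare(release)) // -1

	// Numeric prerelease identifiers are compared as numbers, not ASCII strings.
	fmt.Println(semver.MustParse("1.2.3-beta.11").GreaterThan(semver.MustParse("1.2.3-beta.9"))) // true

	// IncMinor bumps the minor number and clears patch, prerelease and metadata.
	next := release.IncMinor()
	fmt.Println(next.String()) // "1.3.0"

	// JSON marshalling round-trips through the canonical string form.
	b, _ := json.Marshal(release)
	fmt.Println(string(b)) // "1.2.3"
}
```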
For more details see: -# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/ -# - http://docs.travis-ci.com/user/workers/standard-infrastructure/ -sudo: false - -script: - - make setup test - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/06e3328629952dabe3e0 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/sprig/CHANGELOG.md b/vendor/github.com/Masterminds/sprig/CHANGELOG.md deleted file mode 100644 index 6a79fbde469..00000000000 --- a/vendor/github.com/Masterminds/sprig/CHANGELOG.md +++ /dev/null @@ -1,282 +0,0 @@ -# Changelog - -## Release 2.22.0 (2019-10-02) - -### Added - -- #173: Added getHostByName function to resolve dns names to ips (thanks @fcgravalos) -- #195: Added deepCopy function for use with dicts - -### Changed - -- Updated merge and mergeOverwrite documentation to explain copying and how to - use deepCopy with it - -## Release 2.21.0 (2019-09-18) - -### Added - -- #122: Added encryptAES/decryptAES functions (thanks @n0madic) -- #128: Added toDecimal support (thanks @Dean-Coakley) -- #169: Added list contcat (thanks @astorath) -- #174: Added deepEqual function (thanks @bonifaido) -- #170: Added url parse and join functions (thanks @astorath) - -### Changed - -- #171: Updated glide config for Google UUID to v1 and to add ranges to semver and testify - -### Fixed - -- #172: Fix semver wildcard example (thanks @piepmatz) -- #175: Fix dateInZone doc example (thanks @s3than) - -## Release 2.20.0 (2019-06-18) - -### Added - -- #164: Adding function to get unix epoch for a time (@mattfarina) -- #166: Adding tests for date_in_zone (@mattfarina) - -### Changed - -- #144: Fix function comments based on best practices from Effective Go (@CodeLingoTeam) -- #150: Handles pointer type for time.Time in "htmlDate" (@mapreal19) -- #161, #157, #160, #153, #158, #156, #155, #159, #152 documentation updates (@badeadan) - -### Fixed - -## Release 2.19.0 (2019-03-02) - -IMPORTANT: This release reverts a change from 2.18.0 - -In the previous release (2.18), we prematurely merged a partial change to the crypto functions that led to creating two sets of crypto functions (I blame @technosophos -- since that's me). This release rolls back that change, and does what was originally intended: It alters the existing crypto functions to use secure random. - -We debated whether this classifies as a change worthy of major revision, but given the proximity to the last release, we have decided that treating 2.18 as a faulty release is the correct course of action. We apologize for any inconvenience. - -### Changed - -- Fix substr panic 35fb796 (Alexey igrychev) -- Remove extra period 1eb7729 (Matthew Lorimor) -- Make random string functions use crypto by default 6ceff26 (Matthew Lorimor) -- README edits/fixes/suggestions 08fe136 (Lauri Apple) - - -## Release 2.18.0 (2019-02-12) - -### Added - -- Added mergeOverwrite function -- cryptographic functions that use secure random (see fe1de12) - -### Changed - -- Improve documentation of regexMatch function, resolves #139 90b89ce (Jan Tagscherer) -- Handle has for nil list 9c10885 (Daniel Cohen) -- Document behaviour of mergeOverwrite fe0dbe9 (Lukas Rieder) -- doc: adds missing documentation. 
4b871e6 (Fernandez Ludovic) -- Replace outdated goutils imports 01893d2 (Matthew Lorimor) -- Surface crypto secure random strings from goutils fe1de12 (Matthew Lorimor) -- Handle untyped nil values as paramters to string functions 2b2ec8f (Morten Torkildsen) - -### Fixed - -- Fix dict merge issue and provide mergeOverwrite .dst .src1 to overwrite from src -> dst 4c59c12 (Lukas Rieder) -- Fix substr var names and comments d581f80 (Dean Coakley) -- Fix substr documentation 2737203 (Dean Coakley) - -## Release 2.17.1 (2019-01-03) - -### Fixed - -The 2.17.0 release did not have a version pinned for xstrings, which caused compilation failures when xstrings < 1.2 was used. This adds the correct version string to glide.yaml. - -## Release 2.17.0 (2019-01-03) - -### Added - -- adds alder32sum function and test 6908fc2 (marshallford) -- Added kebabcase function ca331a1 (Ilyes512) - -### Changed - -- Update goutils to 1.1.0 4e1125d (Matt Butcher) - -### Fixed - -- Fix 'has' documentation e3f2a85 (dean-coakley) -- docs(dict): fix typo in pick example dc424f9 (Dustin Specker) -- fixes spelling errors... not sure how that happened 4cf188a (marshallford) - -## Release 2.16.0 (2018-08-13) - -### Added - -- add splitn function fccb0b0 (Helgi Þorbjörnsson) -- Add slice func df28ca7 (gongdo) -- Generate serial number a3bdffd (Cody Coons) -- Extract values of dict with values function df39312 (Lawrence Jones) - -### Changed - -- Modify panic message for list.slice ae38335 (gongdo) -- Minor improvement in code quality - Removed an unreachable piece of code at defaults.go#L26:6 - Resolve formatting issues. 5834241 (Abhishek Kashyap) -- Remove duplicated documentation 1d97af1 (Matthew Fisher) -- Test on go 1.11 49df809 (Helgi Þormar Þorbjörnsson) - -### Fixed - -- Fix file permissions c5f40b5 (gongdo) -- Fix example for buildCustomCert 7779e0d (Tin Lam) - -## Release 2.15.0 (2018-04-02) - -### Added - -- #68 and #69: Add json helpers to docs (thanks @arunvelsriram) -- #66: Add ternary function (thanks @binoculars) -- #67: Allow keys function to take multiple dicts (thanks @binoculars) -- #89: Added sha1sum to crypto function (thanks @benkeil) -- #81: Allow customizing Root CA that used by genSignedCert (thanks @chenzhiwei) -- #92: Add travis testing for go 1.10 -- #93: Adding appveyor config for windows testing - -### Changed - -- #90: Updating to more recent dependencies -- #73: replace satori/go.uuid with google/uuid (thanks @petterw) - -### Fixed - -- #76: Fixed documentation typos (thanks @Thiht) -- Fixed rounding issue on the `ago` function. Note, the removes support for Go 1.8 and older - -## Release 2.14.1 (2017-12-01) - -### Fixed - -- #60: Fix typo in function name documentation (thanks @neil-ca-moore) -- #61: Removing line with {{ due to blocking github pages genertion -- #64: Update the list functions to handle int, string, and other slices for compatibility - -## Release 2.14.0 (2017-10-06) - -This new version of Sprig adds a set of functions for generating and working with SSL certificates. 
- -- `genCA` generates an SSL Certificate Authority -- `genSelfSignedCert` generates an SSL self-signed certificate -- `genSignedCert` generates an SSL certificate and key based on a given CA - -## Release 2.13.0 (2017-09-18) - -This release adds new functions, including: - -- `regexMatch`, `regexFindAll`, `regexFind`, `regexReplaceAll`, `regexReplaceAllLiteral`, and `regexSplit` to work with regular expressions -- `floor`, `ceil`, and `round` math functions -- `toDate` converts a string to a date -- `nindent` is just like `indent` but also prepends a new line -- `ago` returns the time from `time.Now` - -### Added - -- #40: Added basic regex functionality (thanks @alanquillin) -- #41: Added ceil floor and round functions (thanks @alanquillin) -- #48: Added toDate function (thanks @andreynering) -- #50: Added nindent function (thanks @binoculars) -- #46: Added ago function (thanks @slayer) - -### Changed - -- #51: Updated godocs to include new string functions (thanks @curtisallen) -- #49: Added ability to merge multiple dicts (thanks @binoculars) - -## Release 2.12.0 (2017-05-17) - -- `snakecase`, `camelcase`, and `shuffle` are three new string functions -- `fail` allows you to bail out of a template render when conditions are not met - -## Release 2.11.0 (2017-05-02) - -- Added `toJson` and `toPrettyJson` -- Added `merge` -- Refactored documentation - -## Release 2.10.0 (2017-03-15) - -- Added `semver` and `semverCompare` for Semantic Versions -- `list` replaces `tuple` -- Fixed issue with `join` -- Added `first`, `last`, `intial`, `rest`, `prepend`, `append`, `toString`, `toStrings`, `sortAlpha`, `reverse`, `coalesce`, `pluck`, `pick`, `compact`, `keys`, `omit`, `uniq`, `has`, `without` - -## Release 2.9.0 (2017-02-23) - -- Added `splitList` to split a list -- Added crypto functions of `genPrivateKey` and `derivePassword` - -## Release 2.8.0 (2016-12-21) - -- Added access to several path functions (`base`, `dir`, `clean`, `ext`, and `abs`) -- Added functions for _mutating_ dictionaries (`set`, `unset`, `hasKey`) - -## Release 2.7.0 (2016-12-01) - -- Added `sha256sum` to generate a hash of an input -- Added functions to convert a numeric or string to `int`, `int64`, `float64` - -## Release 2.6.0 (2016-10-03) - -- Added a `uuidv4` template function for generating UUIDs inside of a template. - -## Release 2.5.0 (2016-08-19) - -- New `trimSuffix`, `trimPrefix`, `hasSuffix`, and `hasPrefix` functions -- New aliases have been added for a few functions that didn't follow the naming conventions (`trimAll` and `abbrevBoth`) -- `trimall` and `abbrevboth` (notice the case) are deprecated and will be removed in 3.0.0 - -## Release 2.4.0 (2016-08-16) - -- Adds two functions: `until` and `untilStep` - -## Release 2.3.0 (2016-06-21) - -- cat: Concatenate strings with whitespace separators. -- replace: Replace parts of a string: `replace " " "-" "Me First"` renders "Me-First" -- plural: Format plurals: `len "foo" | plural "one foo" "many foos"` renders "many foos" -- indent: Indent blocks of text in a way that is sensitive to "\n" characters. - -## Release 2.2.0 (2016-04-21) - -- Added a `genPrivateKey` function (Thanks @bacongobbler) - -## Release 2.1.0 (2016-03-30) - -- `default` now prints the default value when it does not receive a value down the pipeline. It is much safer now to do `{{.Foo | default "bar"}}`. -- Added accessors for "hermetic" functions. These return only functions that, when given the same input, produce the same output. 
- -## Release 2.0.0 (2016-03-29) - -Because we switched from `int` to `int64` as the return value for all integer math functions, the library's major version number has been incremented. - -- `min` complements `max` (formerly `biggest`) -- `empty` indicates that a value is the empty value for its type -- `tuple` creates a tuple inside of a template: `{{$t := tuple "a", "b" "c"}}` -- `dict` creates a dictionary inside of a template `{{$d := dict "key1" "val1" "key2" "val2"}}` -- Date formatters have been added for HTML dates (as used in `date` input fields) -- Integer math functions can convert from a number of types, including `string` (via `strconv.ParseInt`). - -## Release 1.2.0 (2016-02-01) - -- Added quote and squote -- Added b32enc and b32dec -- add now takes varargs -- biggest now takes varargs - -## Release 1.1.0 (2015-12-29) - -- Added #4: Added contains function. strings.Contains, but with the arguments - switched to simplify common pipelines. (thanks krancour) -- Added Travis-CI testing support - -## Release 1.0.0 (2015-12-23) - -- Initial release diff --git a/vendor/github.com/Masterminds/sprig/LICENSE.txt b/vendor/github.com/Masterminds/sprig/LICENSE.txt deleted file mode 100644 index 5c95accc2e2..00000000000 --- a/vendor/github.com/Masterminds/sprig/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Sprig -Copyright (C) 2013 Masterminds - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/sprig/Makefile b/vendor/github.com/Masterminds/sprig/Makefile deleted file mode 100644 index 63a93fdf798..00000000000 --- a/vendor/github.com/Masterminds/sprig/Makefile +++ /dev/null @@ -1,13 +0,0 @@ - -HAS_GLIDE := $(shell command -v glide;) - -.PHONY: test -test: - go test -v . - -.PHONY: setup -setup: -ifndef HAS_GLIDE - go get -u github.com/Masterminds/glide -endif - glide install diff --git a/vendor/github.com/Masterminds/sprig/README.md b/vendor/github.com/Masterminds/sprig/README.md deleted file mode 100644 index b70569585f8..00000000000 --- a/vendor/github.com/Masterminds/sprig/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# Sprig: Template functions for Go templates -[![Stability: Sustained](https://masterminds.github.io/stability/sustained.svg)](https://masterminds.github.io/stability/sustained.html) -[![Build Status](https://travis-ci.org/Masterminds/sprig.svg?branch=master)](https://travis-ci.org/Masterminds/sprig) - -The Go language comes with a [built-in template -language](http://golang.org/pkg/text/template/), but not -very many template functions. 
Sprig is a library that provides more than 100 commonly -used template functions. - -It is inspired by the template functions found in -[Twig](http://twig.sensiolabs.org/documentation) and in various -JavaScript libraries, such as [underscore.js](http://underscorejs.org/). - -## Usage - -**Template developers**: Please use Sprig's [function documentation](http://masterminds.github.io/sprig/) for -detailed instructions and code snippets for the >100 template functions available. - -**Go developers**: If you'd like to include Sprig as a library in your program, -our API documentation is available [at GoDoc.org](http://godoc.org/github.com/Masterminds/sprig). - -For standard usage, read on. - -### Load the Sprig library - -To load the Sprig `FuncMap`: - -```go - -import ( - "github.com/Masterminds/sprig" - "html/template" -) - -// This example illustrates that the FuncMap *must* be set before the -// templates themselves are loaded. -tpl := template.Must( - template.New("base").Funcs(sprig.FuncMap()).ParseGlob("*.html") -) - - -``` - -### Calling the functions inside of templates - -By convention, all functions are lowercase. This seems to follow the Go -idiom for template functions (as opposed to template methods, which are -TitleCase). For example, this: - -``` -{{ "hello!" | upper | repeat 5 }} -``` - -produces this: - -``` -HELLO!HELLO!HELLO!HELLO!HELLO! -``` - -## Principles Driving Our Function Selection - -We followed these principles to decide which functions to add and how to implement them: - -- Use template functions to build layout. The following - types of operations are within the domain of template functions: - - Formatting - - Layout - - Simple type conversions - - Utilities that assist in handling common formatting and layout needs (e.g. arithmetic) -- Template functions should not return errors unless there is no way to print - a sensible value. For example, converting a string to an integer should not - produce an error if conversion fails. Instead, it should display a default - value. -- Simple math is necessary for grid layouts, pagers, and so on. Complex math - (anything other than arithmetic) should be done outside of templates. -- Template functions only deal with the data passed into them. They never retrieve - data from a source. -- Finally, do not override core Go template functions. diff --git a/vendor/github.com/Masterminds/sprig/appveyor.yml b/vendor/github.com/Masterminds/sprig/appveyor.yml deleted file mode 100644 index d545a987a3b..00000000000 --- a/vendor/github.com/Masterminds/sprig/appveyor.yml +++ /dev/null @@ -1,26 +0,0 @@ - -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\sprig -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -install: - - go get -u github.com/Masterminds/glide - - set PATH=%GOPATH%\bin;%PATH% - - go version - - go env - -build_script: - - glide install - - go install ./... 
- -test_script: - - go test -v - -deploy: off diff --git a/vendor/github.com/Masterminds/sprig/crypto.go b/vendor/github.com/Masterminds/sprig/crypto.go deleted file mode 100644 index 7a418ba88d1..00000000000 --- a/vendor/github.com/Masterminds/sprig/crypto.go +++ /dev/null @@ -1,502 +0,0 @@ -package sprig - -import ( - "bytes" - "crypto/aes" - "crypto/cipher" - "crypto/dsa" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/hmac" - "crypto/rand" - "crypto/rsa" - "crypto/sha1" - "crypto/sha256" - "crypto/x509" - "crypto/x509/pkix" - "encoding/asn1" - "encoding/base64" - "encoding/binary" - "encoding/hex" - "encoding/pem" - "errors" - "fmt" - "io" - "hash/adler32" - "math/big" - "net" - "time" - - "github.com/google/uuid" - "golang.org/x/crypto/scrypt" -) - -func sha256sum(input string) string { - hash := sha256.Sum256([]byte(input)) - return hex.EncodeToString(hash[:]) -} - -func sha1sum(input string) string { - hash := sha1.Sum([]byte(input)) - return hex.EncodeToString(hash[:]) -} - -func adler32sum(input string) string { - hash := adler32.Checksum([]byte(input)) - return fmt.Sprintf("%d", hash) -} - -// uuidv4 provides a safe and secure UUID v4 implementation -func uuidv4() string { - return fmt.Sprintf("%s", uuid.New()) -} - -var master_password_seed = "com.lyndir.masterpassword" - -var password_type_templates = map[string][][]byte{ - "maximum": {[]byte("anoxxxxxxxxxxxxxxxxx"), []byte("axxxxxxxxxxxxxxxxxno")}, - "long": {[]byte("CvcvnoCvcvCvcv"), []byte("CvcvCvcvnoCvcv"), []byte("CvcvCvcvCvcvno"), []byte("CvccnoCvcvCvcv"), []byte("CvccCvcvnoCvcv"), - []byte("CvccCvcvCvcvno"), []byte("CvcvnoCvccCvcv"), []byte("CvcvCvccnoCvcv"), []byte("CvcvCvccCvcvno"), []byte("CvcvnoCvcvCvcc"), - []byte("CvcvCvcvnoCvcc"), []byte("CvcvCvcvCvccno"), []byte("CvccnoCvccCvcv"), []byte("CvccCvccnoCvcv"), []byte("CvccCvccCvcvno"), - []byte("CvcvnoCvccCvcc"), []byte("CvcvCvccnoCvcc"), []byte("CvcvCvccCvccno"), []byte("CvccnoCvcvCvcc"), []byte("CvccCvcvnoCvcc"), - []byte("CvccCvcvCvccno")}, - "medium": {[]byte("CvcnoCvc"), []byte("CvcCvcno")}, - "short": {[]byte("Cvcn")}, - "basic": {[]byte("aaanaaan"), []byte("aannaaan"), []byte("aaannaaa")}, - "pin": {[]byte("nnnn")}, -} - -var template_characters = map[byte]string{ - 'V': "AEIOU", - 'C': "BCDFGHJKLMNPQRSTVWXYZ", - 'v': "aeiou", - 'c': "bcdfghjklmnpqrstvwxyz", - 'A': "AEIOUBCDFGHJKLMNPQRSTVWXYZ", - 'a': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz", - 'n': "0123456789", - 'o': "@&%?,=[]_:-+*$#!'^~;()/.", - 'x': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz0123456789!@#$%^&*()", -} - -func derivePassword(counter uint32, password_type, password, user, site string) string { - var templates = password_type_templates[password_type] - if templates == nil { - return fmt.Sprintf("cannot find password template %s", password_type) - } - - var buffer bytes.Buffer - buffer.WriteString(master_password_seed) - binary.Write(&buffer, binary.BigEndian, uint32(len(user))) - buffer.WriteString(user) - - salt := buffer.Bytes() - key, err := scrypt.Key([]byte(password), salt, 32768, 8, 2, 64) - if err != nil { - return fmt.Sprintf("failed to derive password: %s", err) - } - - buffer.Truncate(len(master_password_seed)) - binary.Write(&buffer, binary.BigEndian, uint32(len(site))) - buffer.WriteString(site) - binary.Write(&buffer, binary.BigEndian, counter) - - var hmacv = hmac.New(sha256.New, key) - hmacv.Write(buffer.Bytes()) - var seed = hmacv.Sum(nil) - var temp = templates[int(seed[0])%len(templates)] - - buffer.Truncate(0) - for i, element := range temp { - 
pass_chars := template_characters[element] - pass_char := pass_chars[int(seed[i+1])%len(pass_chars)] - buffer.WriteByte(pass_char) - } - - return buffer.String() -} - -func generatePrivateKey(typ string) string { - var priv interface{} - var err error - switch typ { - case "", "rsa": - // good enough for government work - priv, err = rsa.GenerateKey(rand.Reader, 4096) - case "dsa": - key := new(dsa.PrivateKey) - // again, good enough for government work - if err = dsa.GenerateParameters(&key.Parameters, rand.Reader, dsa.L2048N256); err != nil { - return fmt.Sprintf("failed to generate dsa params: %s", err) - } - err = dsa.GenerateKey(key, rand.Reader) - priv = key - case "ecdsa": - // again, good enough for government work - priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - default: - return "Unknown type " + typ - } - if err != nil { - return fmt.Sprintf("failed to generate private key: %s", err) - } - - return string(pem.EncodeToMemory(pemBlockForKey(priv))) -} - -type DSAKeyFormat struct { - Version int - P, Q, G, Y, X *big.Int -} - -func pemBlockForKey(priv interface{}) *pem.Block { - switch k := priv.(type) { - case *rsa.PrivateKey: - return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)} - case *dsa.PrivateKey: - val := DSAKeyFormat{ - P: k.P, Q: k.Q, G: k.G, - Y: k.Y, X: k.X, - } - bytes, _ := asn1.Marshal(val) - return &pem.Block{Type: "DSA PRIVATE KEY", Bytes: bytes} - case *ecdsa.PrivateKey: - b, _ := x509.MarshalECPrivateKey(k) - return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b} - default: - return nil - } -} - -type certificate struct { - Cert string - Key string -} - -func buildCustomCertificate(b64cert string, b64key string) (certificate, error) { - crt := certificate{} - - cert, err := base64.StdEncoding.DecodeString(b64cert) - if err != nil { - return crt, errors.New("unable to decode base64 certificate") - } - - key, err := base64.StdEncoding.DecodeString(b64key) - if err != nil { - return crt, errors.New("unable to decode base64 private key") - } - - decodedCert, _ := pem.Decode(cert) - if decodedCert == nil { - return crt, errors.New("unable to decode certificate") - } - _, err = x509.ParseCertificate(decodedCert.Bytes) - if err != nil { - return crt, fmt.Errorf( - "error parsing certificate: decodedCert.Bytes: %s", - err, - ) - } - - decodedKey, _ := pem.Decode(key) - if decodedKey == nil { - return crt, errors.New("unable to decode key") - } - _, err = x509.ParsePKCS1PrivateKey(decodedKey.Bytes) - if err != nil { - return crt, fmt.Errorf( - "error parsing prive key: decodedKey.Bytes: %s", - err, - ) - } - - crt.Cert = string(cert) - crt.Key = string(key) - - return crt, nil -} - -func generateCertificateAuthority( - cn string, - daysValid int, -) (certificate, error) { - ca := certificate{} - - template, err := getBaseCertTemplate(cn, nil, nil, daysValid) - if err != nil { - return ca, err - } - // Override KeyUsage and IsCA - template.KeyUsage = x509.KeyUsageKeyEncipherment | - x509.KeyUsageDigitalSignature | - x509.KeyUsageCertSign - template.IsCA = true - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return ca, fmt.Errorf("error generating rsa key: %s", err) - } - - ca.Cert, ca.Key, err = getCertAndKey(template, priv, template, priv) - if err != nil { - return ca, err - } - - return ca, nil -} - -func generateSelfSignedCertificate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, -) (certificate, error) { - cert := certificate{} - - template, err := 
getBaseCertTemplate(cn, ips, alternateDNS, daysValid) - if err != nil { - return cert, err - } - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return cert, fmt.Errorf("error generating rsa key: %s", err) - } - - cert.Cert, cert.Key, err = getCertAndKey(template, priv, template, priv) - if err != nil { - return cert, err - } - - return cert, nil -} - -func generateSignedCertificate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, - ca certificate, -) (certificate, error) { - cert := certificate{} - - decodedSignerCert, _ := pem.Decode([]byte(ca.Cert)) - if decodedSignerCert == nil { - return cert, errors.New("unable to decode certificate") - } - signerCert, err := x509.ParseCertificate(decodedSignerCert.Bytes) - if err != nil { - return cert, fmt.Errorf( - "error parsing certificate: decodedSignerCert.Bytes: %s", - err, - ) - } - decodedSignerKey, _ := pem.Decode([]byte(ca.Key)) - if decodedSignerKey == nil { - return cert, errors.New("unable to decode key") - } - signerKey, err := x509.ParsePKCS1PrivateKey(decodedSignerKey.Bytes) - if err != nil { - return cert, fmt.Errorf( - "error parsing prive key: decodedSignerKey.Bytes: %s", - err, - ) - } - - template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid) - if err != nil { - return cert, err - } - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return cert, fmt.Errorf("error generating rsa key: %s", err) - } - - cert.Cert, cert.Key, err = getCertAndKey( - template, - priv, - signerCert, - signerKey, - ) - if err != nil { - return cert, err - } - - return cert, nil -} - -func getCertAndKey( - template *x509.Certificate, - signeeKey *rsa.PrivateKey, - parent *x509.Certificate, - signingKey *rsa.PrivateKey, -) (string, string, error) { - derBytes, err := x509.CreateCertificate( - rand.Reader, - template, - parent, - &signeeKey.PublicKey, - signingKey, - ) - if err != nil { - return "", "", fmt.Errorf("error creating certificate: %s", err) - } - - certBuffer := bytes.Buffer{} - if err := pem.Encode( - &certBuffer, - &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}, - ); err != nil { - return "", "", fmt.Errorf("error pem-encoding certificate: %s", err) - } - - keyBuffer := bytes.Buffer{} - if err := pem.Encode( - &keyBuffer, - &pem.Block{ - Type: "RSA PRIVATE KEY", - Bytes: x509.MarshalPKCS1PrivateKey(signeeKey), - }, - ); err != nil { - return "", "", fmt.Errorf("error pem-encoding key: %s", err) - } - - return string(certBuffer.Bytes()), string(keyBuffer.Bytes()), nil -} - -func getBaseCertTemplate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, -) (*x509.Certificate, error) { - ipAddresses, err := getNetIPs(ips) - if err != nil { - return nil, err - } - dnsNames, err := getAlternateDNSStrs(alternateDNS) - if err != nil { - return nil, err - } - serialNumberUpperBound := new(big.Int).Lsh(big.NewInt(1), 128) - serialNumber, err := rand.Int(rand.Reader, serialNumberUpperBound) - if err != nil { - return nil, err - } - return &x509.Certificate{ - SerialNumber: serialNumber, - Subject: pkix.Name{ - CommonName: cn, - }, - IPAddresses: ipAddresses, - DNSNames: dnsNames, - NotBefore: time.Now(), - NotAfter: time.Now().Add(time.Hour * 24 * time.Duration(daysValid)), - KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, - ExtKeyUsage: []x509.ExtKeyUsage{ - x509.ExtKeyUsageServerAuth, - x509.ExtKeyUsageClientAuth, - }, - BasicConstraintsValid: true, - }, nil -} - -func getNetIPs(ips []interface{}) ([]net.IP, 
error) { - if ips == nil { - return []net.IP{}, nil - } - var ipStr string - var ok bool - var netIP net.IP - netIPs := make([]net.IP, len(ips)) - for i, ip := range ips { - ipStr, ok = ip.(string) - if !ok { - return nil, fmt.Errorf("error parsing ip: %v is not a string", ip) - } - netIP = net.ParseIP(ipStr) - if netIP == nil { - return nil, fmt.Errorf("error parsing ip: %s", ipStr) - } - netIPs[i] = netIP - } - return netIPs, nil -} - -func getAlternateDNSStrs(alternateDNS []interface{}) ([]string, error) { - if alternateDNS == nil { - return []string{}, nil - } - var dnsStr string - var ok bool - alternateDNSStrs := make([]string, len(alternateDNS)) - for i, dns := range alternateDNS { - dnsStr, ok = dns.(string) - if !ok { - return nil, fmt.Errorf( - "error processing alternate dns name: %v is not a string", - dns, - ) - } - alternateDNSStrs[i] = dnsStr - } - return alternateDNSStrs, nil -} - -func encryptAES(password string, plaintext string) (string, error) { - if plaintext == "" { - return "", nil - } - - key := make([]byte, 32) - copy(key, []byte(password)) - block, err := aes.NewCipher(key) - if err != nil { - return "", err - } - - content := []byte(plaintext) - blockSize := block.BlockSize() - padding := blockSize - len(content)%blockSize - padtext := bytes.Repeat([]byte{byte(padding)}, padding) - content = append(content, padtext...) - - ciphertext := make([]byte, aes.BlockSize+len(content)) - - iv := ciphertext[:aes.BlockSize] - if _, err := io.ReadFull(rand.Reader, iv); err != nil { - return "", err - } - - mode := cipher.NewCBCEncrypter(block, iv) - mode.CryptBlocks(ciphertext[aes.BlockSize:], content) - - return base64.StdEncoding.EncodeToString(ciphertext), nil -} - -func decryptAES(password string, crypt64 string) (string, error) { - if crypt64 == "" { - return "", nil - } - - key := make([]byte, 32) - copy(key, []byte(password)) - - crypt, err := base64.StdEncoding.DecodeString(crypt64) - if err != nil { - return "", err - } - - block, err := aes.NewCipher(key) - if err != nil { - return "", err - } - - iv := crypt[:aes.BlockSize] - crypt = crypt[aes.BlockSize:] - decrypted := make([]byte, len(crypt)) - mode := cipher.NewCBCDecrypter(block, iv) - mode.CryptBlocks(decrypted, crypt) - - return string(decrypted[:len(decrypted)-int(decrypted[len(decrypted)-1])]), nil -} diff --git a/vendor/github.com/Masterminds/sprig/date.go b/vendor/github.com/Masterminds/sprig/date.go deleted file mode 100644 index d1d6155d72c..00000000000 --- a/vendor/github.com/Masterminds/sprig/date.go +++ /dev/null @@ -1,83 +0,0 @@ -package sprig - -import ( - "strconv" - "time" -) - -// Given a format and a date, format the date string. -// -// Date can be a `time.Time` or an `int, int32, int64`. -// In the later case, it is treated as seconds since UNIX -// epoch. 
-func date(fmt string, date interface{}) string { - return dateInZone(fmt, date, "Local") -} - -func htmlDate(date interface{}) string { - return dateInZone("2006-01-02", date, "Local") -} - -func htmlDateInZone(date interface{}, zone string) string { - return dateInZone("2006-01-02", date, zone) -} - -func dateInZone(fmt string, date interface{}, zone string) string { - var t time.Time - switch date := date.(type) { - default: - t = time.Now() - case time.Time: - t = date - case *time.Time: - t = *date - case int64: - t = time.Unix(date, 0) - case int: - t = time.Unix(int64(date), 0) - case int32: - t = time.Unix(int64(date), 0) - } - - loc, err := time.LoadLocation(zone) - if err != nil { - loc, _ = time.LoadLocation("UTC") - } - - return t.In(loc).Format(fmt) -} - -func dateModify(fmt string, date time.Time) time.Time { - d, err := time.ParseDuration(fmt) - if err != nil { - return date - } - return date.Add(d) -} - -func dateAgo(date interface{}) string { - var t time.Time - - switch date := date.(type) { - default: - t = time.Now() - case time.Time: - t = date - case int64: - t = time.Unix(date, 0) - case int: - t = time.Unix(int64(date), 0) - } - // Drop resolution to seconds - duration := time.Since(t).Round(time.Second) - return duration.String() -} - -func toDate(fmt, str string) time.Time { - t, _ := time.ParseInLocation(fmt, str, time.Local) - return t -} - -func unixEpoch(date time.Time) string { - return strconv.FormatInt(date.Unix(), 10) -} diff --git a/vendor/github.com/Masterminds/sprig/defaults.go b/vendor/github.com/Masterminds/sprig/defaults.go deleted file mode 100644 index ed6a8ab291c..00000000000 --- a/vendor/github.com/Masterminds/sprig/defaults.go +++ /dev/null @@ -1,83 +0,0 @@ -package sprig - -import ( - "encoding/json" - "reflect" -) - -// dfault checks whether `given` is set, and returns default if not set. -// -// This returns `d` if `given` appears not to be set, and `given` otherwise. -// -// For numeric types 0 is unset. -// For strings, maps, arrays, and slices, len() = 0 is considered unset. -// For bool, false is unset. -// Structs are never considered unset. -// -// For everything else, including pointers, a nil value is unset. -func dfault(d interface{}, given ...interface{}) interface{} { - - if empty(given) || empty(given[0]) { - return d - } - return given[0] -} - -// empty returns true if the given value has the zero value for its type. -func empty(given interface{}) bool { - g := reflect.ValueOf(given) - if !g.IsValid() { - return true - } - - // Basically adapted from text/template.isTrue - switch g.Kind() { - default: - return g.IsNil() - case reflect.Array, reflect.Slice, reflect.Map, reflect.String: - return g.Len() == 0 - case reflect.Bool: - return g.Bool() == false - case reflect.Complex64, reflect.Complex128: - return g.Complex() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return g.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return g.Uint() == 0 - case reflect.Float32, reflect.Float64: - return g.Float() == 0 - case reflect.Struct: - return false - } -} - -// coalesce returns the first non-empty value. 
-func coalesce(v ...interface{}) interface{} { - for _, val := range v { - if !empty(val) { - return val - } - } - return nil -} - -// toJson encodes an item into a JSON string -func toJson(v interface{}) string { - output, _ := json.Marshal(v) - return string(output) -} - -// toPrettyJson encodes an item into a pretty (indented) JSON string -func toPrettyJson(v interface{}) string { - output, _ := json.MarshalIndent(v, "", " ") - return string(output) -} - -// ternary returns the first value if the last value is true, otherwise returns the second value. -func ternary(vt interface{}, vf interface{}, v bool) interface{} { - if v { - return vt - } - - return vf -} diff --git a/vendor/github.com/Masterminds/sprig/dict.go b/vendor/github.com/Masterminds/sprig/dict.go deleted file mode 100644 index 738405b4332..00000000000 --- a/vendor/github.com/Masterminds/sprig/dict.go +++ /dev/null @@ -1,119 +0,0 @@ -package sprig - -import ( - "github.com/imdario/mergo" - "github.com/mitchellh/copystructure" -) - -func set(d map[string]interface{}, key string, value interface{}) map[string]interface{} { - d[key] = value - return d -} - -func unset(d map[string]interface{}, key string) map[string]interface{} { - delete(d, key) - return d -} - -func hasKey(d map[string]interface{}, key string) bool { - _, ok := d[key] - return ok -} - -func pluck(key string, d ...map[string]interface{}) []interface{} { - res := []interface{}{} - for _, dict := range d { - if val, ok := dict[key]; ok { - res = append(res, val) - } - } - return res -} - -func keys(dicts ...map[string]interface{}) []string { - k := []string{} - for _, dict := range dicts { - for key := range dict { - k = append(k, key) - } - } - return k -} - -func pick(dict map[string]interface{}, keys ...string) map[string]interface{} { - res := map[string]interface{}{} - for _, k := range keys { - if v, ok := dict[k]; ok { - res[k] = v - } - } - return res -} - -func omit(dict map[string]interface{}, keys ...string) map[string]interface{} { - res := map[string]interface{}{} - - omit := make(map[string]bool, len(keys)) - for _, k := range keys { - omit[k] = true - } - - for k, v := range dict { - if _, ok := omit[k]; !ok { - res[k] = v - } - } - return res -} - -func dict(v ...interface{}) map[string]interface{} { - dict := map[string]interface{}{} - lenv := len(v) - for i := 0; i < lenv; i += 2 { - key := strval(v[i]) - if i+1 >= lenv { - dict[key] = "" - continue - } - dict[key] = v[i+1] - } - return dict -} - -func merge(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} { - for _, src := range srcs { - if err := mergo.Merge(&dst, src); err != nil { - // Swallow errors inside of a template. - return "" - } - } - return dst -} - -func mergeOverwrite(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} { - for _, src := range srcs { - if err := mergo.MergeWithOverwrite(&dst, src); err != nil { - // Swallow errors inside of a template. 
- return "" - } - } - return dst -} - -func values(dict map[string]interface{}) []interface{} { - values := []interface{}{} - for _, value := range dict { - values = append(values, value) - } - - return values -} - -func deepCopy(i interface{}) interface{} { - c, err := copystructure.Copy(i) - if err != nil { - panic("deepCopy error: " + err.Error()) - } - - return c -} diff --git a/vendor/github.com/Masterminds/sprig/doc.go b/vendor/github.com/Masterminds/sprig/doc.go deleted file mode 100644 index 8f8f1d73703..00000000000 --- a/vendor/github.com/Masterminds/sprig/doc.go +++ /dev/null @@ -1,19 +0,0 @@ -/* -Sprig: Template functions for Go. - -This package contains a number of utility functions for working with data -inside of Go `html/template` and `text/template` files. - -To add these functions, use the `template.Funcs()` method: - - t := templates.New("foo").Funcs(sprig.FuncMap()) - -Note that you should add the function map before you parse any template files. - - In several cases, Sprig reverses the order of arguments from the way they - appear in the standard library. This is to make it easier to pipe - arguments into functions. - -See http://masterminds.github.io/sprig/ for more detailed documentation on each of the available functions. -*/ -package sprig diff --git a/vendor/github.com/Masterminds/sprig/functions.go b/vendor/github.com/Masterminds/sprig/functions.go deleted file mode 100644 index 7b5b0af86c0..00000000000 --- a/vendor/github.com/Masterminds/sprig/functions.go +++ /dev/null @@ -1,306 +0,0 @@ -package sprig - -import ( - "errors" - "html/template" - "os" - "path" - "reflect" - "strconv" - "strings" - ttemplate "text/template" - "time" - - util "github.com/Masterminds/goutils" - "github.com/huandu/xstrings" -) - -// Produce the function map. -// -// Use this to pass the functions into the template engine: -// -// tpl := template.New("foo").Funcs(sprig.FuncMap())) -// -func FuncMap() template.FuncMap { - return HtmlFuncMap() -} - -// HermeticTxtFuncMap returns a 'text/template'.FuncMap with only repeatable functions. -func HermeticTxtFuncMap() ttemplate.FuncMap { - r := TxtFuncMap() - for _, name := range nonhermeticFunctions { - delete(r, name) - } - return r -} - -// HermeticHtmlFuncMap returns an 'html/template'.Funcmap with only repeatable functions. -func HermeticHtmlFuncMap() template.FuncMap { - r := HtmlFuncMap() - for _, name := range nonhermeticFunctions { - delete(r, name) - } - return r -} - -// TxtFuncMap returns a 'text/template'.FuncMap -func TxtFuncMap() ttemplate.FuncMap { - return ttemplate.FuncMap(GenericFuncMap()) -} - -// HtmlFuncMap returns an 'html/template'.Funcmap -func HtmlFuncMap() template.FuncMap { - return template.FuncMap(GenericFuncMap()) -} - -// GenericFuncMap returns a copy of the basic function map as a map[string]interface{}. -func GenericFuncMap() map[string]interface{} { - gfm := make(map[string]interface{}, len(genericMap)) - for k, v := range genericMap { - gfm[k] = v - } - return gfm -} - -// These functions are not guaranteed to evaluate to the same result for given input, because they -// refer to the environemnt or global state. 
-var nonhermeticFunctions = []string{ - // Date functions - "date", - "date_in_zone", - "date_modify", - "now", - "htmlDate", - "htmlDateInZone", - "dateInZone", - "dateModify", - - // Strings - "randAlphaNum", - "randAlpha", - "randAscii", - "randNumeric", - "uuidv4", - - // OS - "env", - "expandenv", - - // Network - "getHostByName", -} - -var genericMap = map[string]interface{}{ - "hello": func() string { return "Hello!" }, - - // Date functions - "date": date, - "date_in_zone": dateInZone, - "date_modify": dateModify, - "now": func() time.Time { return time.Now() }, - "htmlDate": htmlDate, - "htmlDateInZone": htmlDateInZone, - "dateInZone": dateInZone, - "dateModify": dateModify, - "ago": dateAgo, - "toDate": toDate, - "unixEpoch": unixEpoch, - - // Strings - "abbrev": abbrev, - "abbrevboth": abbrevboth, - "trunc": trunc, - "trim": strings.TrimSpace, - "upper": strings.ToUpper, - "lower": strings.ToLower, - "title": strings.Title, - "untitle": untitle, - "substr": substring, - // Switch order so that "foo" | repeat 5 - "repeat": func(count int, str string) string { return strings.Repeat(str, count) }, - // Deprecated: Use trimAll. - "trimall": func(a, b string) string { return strings.Trim(b, a) }, - // Switch order so that "$foo" | trimall "$" - "trimAll": func(a, b string) string { return strings.Trim(b, a) }, - "trimSuffix": func(a, b string) string { return strings.TrimSuffix(b, a) }, - "trimPrefix": func(a, b string) string { return strings.TrimPrefix(b, a) }, - "nospace": util.DeleteWhiteSpace, - "initials": initials, - "randAlphaNum": randAlphaNumeric, - "randAlpha": randAlpha, - "randAscii": randAscii, - "randNumeric": randNumeric, - "swapcase": util.SwapCase, - "shuffle": xstrings.Shuffle, - "snakecase": xstrings.ToSnakeCase, - "camelcase": xstrings.ToCamelCase, - "kebabcase": xstrings.ToKebabCase, - "wrap": func(l int, s string) string { return util.Wrap(s, l) }, - "wrapWith": func(l int, sep, str string) string { return util.WrapCustom(str, l, sep, true) }, - // Switch order so that "foobar" | contains "foo" - "contains": func(substr string, str string) bool { return strings.Contains(str, substr) }, - "hasPrefix": func(substr string, str string) bool { return strings.HasPrefix(str, substr) }, - "hasSuffix": func(substr string, str string) bool { return strings.HasSuffix(str, substr) }, - "quote": quote, - "squote": squote, - "cat": cat, - "indent": indent, - "nindent": nindent, - "replace": replace, - "plural": plural, - "sha1sum": sha1sum, - "sha256sum": sha256sum, - "adler32sum": adler32sum, - "toString": strval, - - // Wrap Atoi to stop errors. - "atoi": func(a string) int { i, _ := strconv.Atoi(a); return i }, - "int64": toInt64, - "int": toInt, - "float64": toFloat64, - "toDecimal": toDecimal, - - //"gt": func(a, b int) bool {return a > b}, - //"gte": func(a, b int) bool {return a >= b}, - //"lt": func(a, b int) bool {return a < b}, - //"lte": func(a, b int) bool {return a <= b}, - - // split "/" foo/bar returns map[int]string{0: foo, 1: bar} - "split": split, - "splitList": func(sep, orig string) []string { return strings.Split(orig, sep) }, - // splitn "/" foo/bar/fuu returns map[int]string{0: foo, 1: bar/fuu} - "splitn": splitn, - "toStrings": strslice, - - "until": until, - "untilStep": untilStep, - - // VERY basic arithmetic. 
- "add1": func(i interface{}) int64 { return toInt64(i) + 1 }, - "add": func(i ...interface{}) int64 { - var a int64 = 0 - for _, b := range i { - a += toInt64(b) - } - return a - }, - "sub": func(a, b interface{}) int64 { return toInt64(a) - toInt64(b) }, - "div": func(a, b interface{}) int64 { return toInt64(a) / toInt64(b) }, - "mod": func(a, b interface{}) int64 { return toInt64(a) % toInt64(b) }, - "mul": func(a interface{}, v ...interface{}) int64 { - val := toInt64(a) - for _, b := range v { - val = val * toInt64(b) - } - return val - }, - "biggest": max, - "max": max, - "min": min, - "ceil": ceil, - "floor": floor, - "round": round, - - // string slices. Note that we reverse the order b/c that's better - // for template processing. - "join": join, - "sortAlpha": sortAlpha, - - // Defaults - "default": dfault, - "empty": empty, - "coalesce": coalesce, - "compact": compact, - "deepCopy": deepCopy, - "toJson": toJson, - "toPrettyJson": toPrettyJson, - "ternary": ternary, - - // Reflection - "typeOf": typeOf, - "typeIs": typeIs, - "typeIsLike": typeIsLike, - "kindOf": kindOf, - "kindIs": kindIs, - "deepEqual": reflect.DeepEqual, - - // OS: - "env": func(s string) string { return os.Getenv(s) }, - "expandenv": func(s string) string { return os.ExpandEnv(s) }, - - // Network: - "getHostByName": getHostByName, - - // File Paths: - "base": path.Base, - "dir": path.Dir, - "clean": path.Clean, - "ext": path.Ext, - "isAbs": path.IsAbs, - - // Encoding: - "b64enc": base64encode, - "b64dec": base64decode, - "b32enc": base32encode, - "b32dec": base32decode, - - // Data Structures: - "tuple": list, // FIXME: with the addition of append/prepend these are no longer immutable. - "list": list, - "dict": dict, - "set": set, - "unset": unset, - "hasKey": hasKey, - "pluck": pluck, - "keys": keys, - "pick": pick, - "omit": omit, - "merge": merge, - "mergeOverwrite": mergeOverwrite, - "values": values, - - "append": push, "push": push, - "prepend": prepend, - "first": first, - "rest": rest, - "last": last, - "initial": initial, - "reverse": reverse, - "uniq": uniq, - "without": without, - "has": has, - "slice": slice, - "concat": concat, - - // Crypto: - "genPrivateKey": generatePrivateKey, - "derivePassword": derivePassword, - "buildCustomCert": buildCustomCertificate, - "genCA": generateCertificateAuthority, - "genSelfSignedCert": generateSelfSignedCertificate, - "genSignedCert": generateSignedCertificate, - "encryptAES": encryptAES, - "decryptAES": decryptAES, - - // UUIDs: - "uuidv4": uuidv4, - - // SemVer: - "semver": semver, - "semverCompare": semverCompare, - - // Flow Control: - "fail": func(msg string) (string, error) { return "", errors.New(msg) }, - - // Regex - "regexMatch": regexMatch, - "regexFindAll": regexFindAll, - "regexFind": regexFind, - "regexReplaceAll": regexReplaceAll, - "regexReplaceAllLiteral": regexReplaceAllLiteral, - "regexSplit": regexSplit, - - // URLs: - "urlParse": urlParse, - "urlJoin": urlJoin, -} diff --git a/vendor/github.com/Masterminds/sprig/glide.yaml b/vendor/github.com/Masterminds/sprig/glide.yaml deleted file mode 100644 index f317d2b2b16..00000000000 --- a/vendor/github.com/Masterminds/sprig/glide.yaml +++ /dev/null @@ -1,19 +0,0 @@ -package: github.com/Masterminds/sprig -import: -- package: github.com/Masterminds/goutils - version: ^1.0.0 -- package: github.com/google/uuid - version: ^1.0.0 -- package: golang.org/x/crypto - subpackages: - - scrypt -- package: github.com/Masterminds/semver - version: ^v1.2.2 -- package: github.com/stretchr/testify - version: 
^v1.2.2 -- package: github.com/imdario/mergo - version: ~0.3.7 -- package: github.com/huandu/xstrings - version: ^1.2 -- package: github.com/mitchellh/copystructure - version: ^1.0.0 diff --git a/vendor/github.com/Masterminds/sprig/list.go b/vendor/github.com/Masterminds/sprig/list.go deleted file mode 100644 index c0381bbb650..00000000000 --- a/vendor/github.com/Masterminds/sprig/list.go +++ /dev/null @@ -1,311 +0,0 @@ -package sprig - -import ( - "fmt" - "reflect" - "sort" -) - -// Reflection is used in these functions so that slices and arrays of strings, -// ints, and other types not implementing []interface{} can be worked with. -// For example, this is useful if you need to work on the output of regexs. - -func list(v ...interface{}) []interface{} { - return v -} - -func push(list interface{}, v interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[i] = l2.Index(i).Interface() - } - - return append(nl, v) - - default: - panic(fmt.Sprintf("Cannot push on type %s", tp)) - } -} - -func prepend(list interface{}, v interface{}) []interface{} { - //return append([]interface{}{v}, list...) - - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[i] = l2.Index(i).Interface() - } - - return append([]interface{}{v}, nl...) - - default: - panic(fmt.Sprintf("Cannot prepend on type %s", tp)) - } -} - -func last(list interface{}) interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - return l2.Index(l - 1).Interface() - default: - panic(fmt.Sprintf("Cannot find last on type %s", tp)) - } -} - -func first(list interface{}) interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - return l2.Index(0).Interface() - default: - panic(fmt.Sprintf("Cannot find first on type %s", tp)) - } -} - -func rest(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - nl := make([]interface{}, l-1) - for i := 1; i < l; i++ { - nl[i-1] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find rest on type %s", tp)) - } -} - -func initial(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - nl := make([]interface{}, l-1) - for i := 0; i < l-1; i++ { - nl[i] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find initial on type %s", tp)) - } -} - -func sortAlpha(list interface{}) []string { - k := reflect.Indirect(reflect.ValueOf(list)).Kind() - switch k { - case reflect.Slice, reflect.Array: - a := strslice(list) - s := sort.StringSlice(a) - s.Sort() - return s - } - return []string{strval(list)} -} - -func reverse(v interface{}) []interface{} { - tp := reflect.TypeOf(v).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(v) - - l := l2.Len() - // We do not sort in place 
because the incoming array should not be altered. - nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[l-i-1] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find reverse on type %s", tp)) - } -} - -func compact(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !empty(item) { - nl = append(nl, item) - } - } - - return nl - default: - panic(fmt.Sprintf("Cannot compact on type %s", tp)) - } -} - -func uniq(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - dest := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !inList(dest, item) { - dest = append(dest, item) - } - } - - return dest - default: - panic(fmt.Sprintf("Cannot find uniq on type %s", tp)) - } -} - -func inList(haystack []interface{}, needle interface{}) bool { - for _, h := range haystack { - if reflect.DeepEqual(needle, h) { - return true - } - } - return false -} - -func without(list interface{}, omit ...interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - res := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !inList(omit, item) { - res = append(res, item) - } - } - - return res - default: - panic(fmt.Sprintf("Cannot find without on type %s", tp)) - } -} - -func has(needle interface{}, haystack interface{}) bool { - if haystack == nil { - return false - } - tp := reflect.TypeOf(haystack).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(haystack) - var item interface{} - l := l2.Len() - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if reflect.DeepEqual(needle, item) { - return true - } - } - - return false - default: - panic(fmt.Sprintf("Cannot find has on type %s", tp)) - } -} - -// $list := [1, 2, 3, 4, 5] -// slice $list -> list[0:5] = list[:] -// slice $list 0 3 -> list[0:3] = list[:3] -// slice $list 3 5 -> list[3:5] -// slice $list 3 -> list[3:5] = list[3:] -func slice(list interface{}, indices ...interface{}) interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - var start, end int - if len(indices) > 0 { - start = toInt(indices[0]) - } - if len(indices) < 2 { - end = l - } else { - end = toInt(indices[1]) - } - - return l2.Slice(start, end).Interface() - default: - panic(fmt.Sprintf("list should be type of slice or array but %s", tp)) - } -} - -func concat(lists ...interface{}) interface{} { - var res []interface{} - for _, list := range lists { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - for i := 0; i < l2.Len(); i++ { - res = append(res, l2.Index(i).Interface()) - } - default: - panic(fmt.Sprintf("Cannot concat type %s as list", tp)) - } - } - return res -} diff --git a/vendor/github.com/Masterminds/sprig/network.go b/vendor/github.com/Masterminds/sprig/network.go deleted file mode 100644 index d786cc7363b..00000000000 --- 
a/vendor/github.com/Masterminds/sprig/network.go +++ /dev/null @@ -1,12 +0,0 @@ -package sprig - -import ( - "math/rand" - "net" -) - -func getHostByName(name string) string { - addrs, _ := net.LookupHost(name) - //TODO: add error handing when release v3 cames out - return addrs[rand.Intn(len(addrs))] -} diff --git a/vendor/github.com/Masterminds/sprig/numeric.go b/vendor/github.com/Masterminds/sprig/numeric.go deleted file mode 100644 index f4af4af2a7f..00000000000 --- a/vendor/github.com/Masterminds/sprig/numeric.go +++ /dev/null @@ -1,169 +0,0 @@ -package sprig - -import ( - "fmt" - "math" - "reflect" - "strconv" -) - -// toFloat64 converts 64-bit floats -func toFloat64(v interface{}) float64 { - if str, ok := v.(string); ok { - iv, err := strconv.ParseFloat(str, 64) - if err != nil { - return 0 - } - return iv - } - - val := reflect.Indirect(reflect.ValueOf(v)) - switch val.Kind() { - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return float64(val.Int()) - case reflect.Uint8, reflect.Uint16, reflect.Uint32: - return float64(val.Uint()) - case reflect.Uint, reflect.Uint64: - return float64(val.Uint()) - case reflect.Float32, reflect.Float64: - return val.Float() - case reflect.Bool: - if val.Bool() == true { - return 1 - } - return 0 - default: - return 0 - } -} - -func toInt(v interface{}) int { - //It's not optimal. Bud I don't want duplicate toInt64 code. - return int(toInt64(v)) -} - -// toInt64 converts integer types to 64-bit integers -func toInt64(v interface{}) int64 { - if str, ok := v.(string); ok { - iv, err := strconv.ParseInt(str, 10, 64) - if err != nil { - return 0 - } - return iv - } - - val := reflect.Indirect(reflect.ValueOf(v)) - switch val.Kind() { - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return val.Int() - case reflect.Uint8, reflect.Uint16, reflect.Uint32: - return int64(val.Uint()) - case reflect.Uint, reflect.Uint64: - tv := val.Uint() - if tv <= math.MaxInt64 { - return int64(tv) - } - // TODO: What is the sensible thing to do here? 
- return math.MaxInt64 - case reflect.Float32, reflect.Float64: - return int64(val.Float()) - case reflect.Bool: - if val.Bool() == true { - return 1 - } - return 0 - default: - return 0 - } -} - -func max(a interface{}, i ...interface{}) int64 { - aa := toInt64(a) - for _, b := range i { - bb := toInt64(b) - if bb > aa { - aa = bb - } - } - return aa -} - -func min(a interface{}, i ...interface{}) int64 { - aa := toInt64(a) - for _, b := range i { - bb := toInt64(b) - if bb < aa { - aa = bb - } - } - return aa -} - -func until(count int) []int { - step := 1 - if count < 0 { - step = -1 - } - return untilStep(0, count, step) -} - -func untilStep(start, stop, step int) []int { - v := []int{} - - if stop < start { - if step >= 0 { - return v - } - for i := start; i > stop; i += step { - v = append(v, i) - } - return v - } - - if step <= 0 { - return v - } - for i := start; i < stop; i += step { - v = append(v, i) - } - return v -} - -func floor(a interface{}) float64 { - aa := toFloat64(a) - return math.Floor(aa) -} - -func ceil(a interface{}) float64 { - aa := toFloat64(a) - return math.Ceil(aa) -} - -func round(a interface{}, p int, r_opt ...float64) float64 { - roundOn := .5 - if len(r_opt) > 0 { - roundOn = r_opt[0] - } - val := toFloat64(a) - places := toFloat64(p) - - var round float64 - pow := math.Pow(10, places) - digit := pow * val - _, div := math.Modf(digit) - if div >= roundOn { - round = math.Ceil(digit) - } else { - round = math.Floor(digit) - } - return round / pow -} - -// converts unix octal to decimal -func toDecimal(v interface{}) int64 { - result, err := strconv.ParseInt(fmt.Sprint(v), 8, 64) - if err != nil { - return 0 - } - return result -} diff --git a/vendor/github.com/Masterminds/sprig/reflect.go b/vendor/github.com/Masterminds/sprig/reflect.go deleted file mode 100644 index 8a65c132f08..00000000000 --- a/vendor/github.com/Masterminds/sprig/reflect.go +++ /dev/null @@ -1,28 +0,0 @@ -package sprig - -import ( - "fmt" - "reflect" -) - -// typeIs returns true if the src is the type named in target. 
-func typeIs(target string, src interface{}) bool { - return target == typeOf(src) -} - -func typeIsLike(target string, src interface{}) bool { - t := typeOf(src) - return target == t || "*"+target == t -} - -func typeOf(src interface{}) string { - return fmt.Sprintf("%T", src) -} - -func kindIs(target string, src interface{}) bool { - return target == kindOf(src) -} - -func kindOf(src interface{}) string { - return reflect.ValueOf(src).Kind().String() -} diff --git a/vendor/github.com/Masterminds/sprig/regex.go b/vendor/github.com/Masterminds/sprig/regex.go deleted file mode 100644 index 2016f66336f..00000000000 --- a/vendor/github.com/Masterminds/sprig/regex.go +++ /dev/null @@ -1,35 +0,0 @@ -package sprig - -import ( - "regexp" -) - -func regexMatch(regex string, s string) bool { - match, _ := regexp.MatchString(regex, s) - return match -} - -func regexFindAll(regex string, s string, n int) []string { - r := regexp.MustCompile(regex) - return r.FindAllString(s, n) -} - -func regexFind(regex string, s string) string { - r := regexp.MustCompile(regex) - return r.FindString(s) -} - -func regexReplaceAll(regex string, s string, repl string) string { - r := regexp.MustCompile(regex) - return r.ReplaceAllString(s, repl) -} - -func regexReplaceAllLiteral(regex string, s string, repl string) string { - r := regexp.MustCompile(regex) - return r.ReplaceAllLiteralString(s, repl) -} - -func regexSplit(regex string, s string, n int) []string { - r := regexp.MustCompile(regex) - return r.Split(s, n) -} diff --git a/vendor/github.com/Masterminds/sprig/semver.go b/vendor/github.com/Masterminds/sprig/semver.go deleted file mode 100644 index c2bf8a1fdf3..00000000000 --- a/vendor/github.com/Masterminds/sprig/semver.go +++ /dev/null @@ -1,23 +0,0 @@ -package sprig - -import ( - sv2 "github.com/Masterminds/semver" -) - -func semverCompare(constraint, version string) (bool, error) { - c, err := sv2.NewConstraint(constraint) - if err != nil { - return false, err - } - - v, err := sv2.NewVersion(version) - if err != nil { - return false, err - } - - return c.Check(v), nil -} - -func semver(version string) (*sv2.Version, error) { - return sv2.NewVersion(version) -} diff --git a/vendor/github.com/Masterminds/sprig/strings.go b/vendor/github.com/Masterminds/sprig/strings.go deleted file mode 100644 index 943fa3e8ad5..00000000000 --- a/vendor/github.com/Masterminds/sprig/strings.go +++ /dev/null @@ -1,233 +0,0 @@ -package sprig - -import ( - "encoding/base32" - "encoding/base64" - "fmt" - "reflect" - "strconv" - "strings" - - util "github.com/Masterminds/goutils" -) - -func base64encode(v string) string { - return base64.StdEncoding.EncodeToString([]byte(v)) -} - -func base64decode(v string) string { - data, err := base64.StdEncoding.DecodeString(v) - if err != nil { - return err.Error() - } - return string(data) -} - -func base32encode(v string) string { - return base32.StdEncoding.EncodeToString([]byte(v)) -} - -func base32decode(v string) string { - data, err := base32.StdEncoding.DecodeString(v) - if err != nil { - return err.Error() - } - return string(data) -} - -func abbrev(width int, s string) string { - if width < 4 { - return s - } - r, _ := util.Abbreviate(s, width) - return r -} - -func abbrevboth(left, right int, s string) string { - if right < 4 || left > 0 && right < 7 { - return s - } - r, _ := util.AbbreviateFull(s, left, right) - return r -} -func initials(s string) string { - // Wrap this just to eliminate the var args, which templates don't do well. 
- return util.Initials(s) -} - -func randAlphaNumeric(count int) string { - // It is not possible, it appears, to actually generate an error here. - r, _ := util.CryptoRandomAlphaNumeric(count) - return r -} - -func randAlpha(count int) string { - r, _ := util.CryptoRandomAlphabetic(count) - return r -} - -func randAscii(count int) string { - r, _ := util.CryptoRandomAscii(count) - return r -} - -func randNumeric(count int) string { - r, _ := util.CryptoRandomNumeric(count) - return r -} - -func untitle(str string) string { - return util.Uncapitalize(str) -} - -func quote(str ...interface{}) string { - out := make([]string, 0, len(str)) - for _, s := range str { - if s != nil { - out = append(out, fmt.Sprintf("%q", strval(s))) - } - } - return strings.Join(out, " ") -} - -func squote(str ...interface{}) string { - out := make([]string, 0, len(str)) - for _, s := range str { - if s != nil { - out = append(out, fmt.Sprintf("'%v'", s)) - } - } - return strings.Join(out, " ") -} - -func cat(v ...interface{}) string { - v = removeNilElements(v) - r := strings.TrimSpace(strings.Repeat("%v ", len(v))) - return fmt.Sprintf(r, v...) -} - -func indent(spaces int, v string) string { - pad := strings.Repeat(" ", spaces) - return pad + strings.Replace(v, "\n", "\n"+pad, -1) -} - -func nindent(spaces int, v string) string { - return "\n" + indent(spaces, v) -} - -func replace(old, new, src string) string { - return strings.Replace(src, old, new, -1) -} - -func plural(one, many string, count int) string { - if count == 1 { - return one - } - return many -} - -func strslice(v interface{}) []string { - switch v := v.(type) { - case []string: - return v - case []interface{}: - b := make([]string, 0, len(v)) - for _, s := range v { - if s != nil { - b = append(b, strval(s)) - } - } - return b - default: - val := reflect.ValueOf(v) - switch val.Kind() { - case reflect.Array, reflect.Slice: - l := val.Len() - b := make([]string, 0, l) - for i := 0; i < l; i++ { - value := val.Index(i).Interface() - if value != nil { - b = append(b, strval(value)) - } - } - return b - default: - if v == nil { - return []string{} - } else { - return []string{strval(v)} - } - } - } -} - -func removeNilElements(v []interface{}) []interface{} { - newSlice := make([]interface{}, 0, len(v)) - for _, i := range v { - if i != nil { - newSlice = append(newSlice, i) - } - } - return newSlice -} - -func strval(v interface{}) string { - switch v := v.(type) { - case string: - return v - case []byte: - return string(v) - case error: - return v.Error() - case fmt.Stringer: - return v.String() - default: - return fmt.Sprintf("%v", v) - } -} - -func trunc(c int, s string) string { - if len(s) <= c { - return s - } - return s[0:c] -} - -func join(sep string, v interface{}) string { - return strings.Join(strslice(v), sep) -} - -func split(sep, orig string) map[string]string { - parts := strings.Split(orig, sep) - res := make(map[string]string, len(parts)) - for i, v := range parts { - res["_"+strconv.Itoa(i)] = v - } - return res -} - -func splitn(sep string, n int, orig string) map[string]string { - parts := strings.SplitN(orig, sep, n) - res := make(map[string]string, len(parts)) - for i, v := range parts { - res["_"+strconv.Itoa(i)] = v - } - return res -} - -// substring creates a substring of the given string. -// -// If start is < 0, this calls string[:end]. -// -// If start is >= 0 and end < 0 or end bigger than s length, this calls string[start:] -// -// Otherwise, this calls string[start, end]. 
-func substring(start, end int, s string) string { - if start < 0 { - return s[:end] - } - if end < 0 || end > len(s) { - return s[start:] - } - return s[start:end] -} diff --git a/vendor/github.com/Masterminds/sprig/url.go b/vendor/github.com/Masterminds/sprig/url.go deleted file mode 100644 index 5f22d801f92..00000000000 --- a/vendor/github.com/Masterminds/sprig/url.go +++ /dev/null @@ -1,66 +0,0 @@ -package sprig - -import ( - "fmt" - "net/url" - "reflect" -) - -func dictGetOrEmpty(dict map[string]interface{}, key string) string { - value, ok := dict[key]; if !ok { - return "" - } - tp := reflect.TypeOf(value).Kind() - if tp != reflect.String { - panic(fmt.Sprintf("unable to parse %s key, must be of type string, but %s found", key, tp.String())) - } - return reflect.ValueOf(value).String() -} - -// parses given URL to return dict object -func urlParse(v string) map[string]interface{} { - dict := map[string]interface{}{} - parsedUrl, err := url.Parse(v) - if err != nil { - panic(fmt.Sprintf("unable to parse url: %s", err)) - } - dict["scheme"] = parsedUrl.Scheme - dict["host"] = parsedUrl.Host - dict["hostname"] = parsedUrl.Hostname() - dict["path"] = parsedUrl.Path - dict["query"] = parsedUrl.RawQuery - dict["opaque"] = parsedUrl.Opaque - dict["fragment"] = parsedUrl.Fragment - if parsedUrl.User != nil { - dict["userinfo"] = parsedUrl.User.String() - } else { - dict["userinfo"] = "" - } - - return dict -} - -// join given dict to URL string -func urlJoin(d map[string]interface{}) string { - resUrl := url.URL{ - Scheme: dictGetOrEmpty(d, "scheme"), - Host: dictGetOrEmpty(d, "host"), - Path: dictGetOrEmpty(d, "path"), - RawQuery: dictGetOrEmpty(d, "query"), - Opaque: dictGetOrEmpty(d, "opaque"), - Fragment: dictGetOrEmpty(d, "fragment"), - - } - userinfo := dictGetOrEmpty(d, "userinfo") - var user *url.Userinfo = nil - if userinfo != "" { - tempUrl, err := url.Parse(fmt.Sprintf("proto://%s@host", userinfo)) - if err != nil { - panic(fmt.Sprintf("unable to parse userinfo in dict: %s", err)) - } - user = tempUrl.User - } - - resUrl.User = user - return resUrl.String() -} diff --git a/vendor/github.com/TykTechnologies/again/LICENSE b/vendor/github.com/TykTechnologies/again/LICENSE deleted file mode 100644 index 363fa9ee77b..00000000000 --- a/vendor/github.com/TykTechnologies/again/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -Copyright 2012 Richard Crowley. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - -THIS SOFTWARE IS PROVIDED BY RICHARD CROWLEY ``AS IS'' AND ANY EXPRESS -OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL RICHARD CROWLEY OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. - -The views and conclusions contained in the software and documentation -are those of the authors and should not be interpreted as representing -official policies, either expressed or implied, of Richard Crowley. diff --git a/vendor/github.com/TykTechnologies/again/README.md b/vendor/github.com/TykTechnologies/again/README.md deleted file mode 100644 index e256bb4c7cb..00000000000 --- a/vendor/github.com/TykTechnologies/again/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# again -graceful restarts with multiple listeners support for Go diff --git a/vendor/github.com/TykTechnologies/again/again.go b/vendor/github.com/TykTechnologies/again/again.go deleted file mode 100644 index 0e28dcbb65f..00000000000 --- a/vendor/github.com/TykTechnologies/again/again.go +++ /dev/null @@ -1,457 +0,0 @@ -package again - -import ( - "bytes" - "errors" - "fmt" - "io" - "log" - "net" - "os" - "os/exec" - "os/signal" - "reflect" - "strings" - "sync" - "syscall" -) - -var OnForkHook func() - -// Don't make the caller import syscall. -const ( - SIGINT = syscall.SIGINT - SIGQUIT = syscall.SIGQUIT - SIGTERM = syscall.SIGTERM - SIGUSR2 = syscall.SIGUSR2 -) - -// Service is a single service listening on a single net.Listener. -type Service struct { - Name string - FdName string - Descriptor uintptr - Listener net.Listener -} - -// Hooks callbacks invoked when specific signal is received. -type Hooks struct { - // OnSIGHUP is the function called when the server receives a SIGHUP - // signal. The normal use case for SIGHUP is to reload the - // configuration. - OnSIGHUP func(*Again) error - // OnSIGUSR1 is the function called when the server receives a - // SIGUSR1 signal. The normal use case for SIGUSR1 is to repon the - // log files. 
- OnSIGUSR1 func(*Again) error - // OnSIGQUIT use this for graceful shutdown - OnSIGQUIT func(*Again) error - OnSIGTERM func(*Again) error -} - -// Again manages services that need graceful restarts -type Again struct { - services *sync.Map - Hooks Hooks -} - -func New(hooks ...Hooks) Again { - var h Hooks - if len(hooks) > 0 { - h = hooks[0] - } - return Again{ - services: &sync.Map{}, - Hooks: h, - } -} - -func (a *Again) Env() (m map[string]string, err error) { - var fds []string - var names []string - var fdNames []string - a.services.Range(func(k, value interface{}) bool { - s := value.(*Service) - names = append(names, s.Name) - _, _, e1 := syscall.Syscall(syscall.SYS_FCNTL, s.Descriptor, syscall.F_SETFD, 0) - if 0 != e1 { - err = e1 - return false - } - fds = append(fds, fmt.Sprint(s.Descriptor)) - fdNames = append(fdNames, s.FdName) - return true - }) - if err != nil { - return - } - return map[string]string{ - "GOAGAIN_FD": strings.Join(fds, ","), - "GOAGAIN_SERVICE_NAME": strings.Join(names, ","), - "GOAGAIN_NAME": strings.Join(fdNames, ","), - }, nil -} - -func ListerName(l net.Listener) string { - addr := l.Addr() - return fmt.Sprintf("%s:%s->", addr.Network(), addr.String()) -} - -func (a *Again) Range(fn func(*Service)) { - a.services.Range(func(k, v interface{}) bool { - s := v.(*Service) - fn(s) - return true - }) -} - -// Close tries to close all service listeners -func (a Again) Close() error { - var e bytes.Buffer - a.Range(func(s *Service) { - if err := s.Listener.Close(); err != nil { - e.WriteString(err.Error()) - e.WriteByte('\n') - } - }) - if e.Len() > 0 { - return errors.New(e.String()) - } - return nil -} -func hasElem(v reflect.Value) bool { - switch v.Kind() { - case reflect.Ptr, reflect.Interface: - return true - default: - return false - } -} - -// Listen creates a new service with the given listener. -func (a *Again) Listen(name string, ls net.Listener) error { - v := reflect.ValueOf(ls) - if v.Kind() == reflect.Ptr { - v = v.Elem() - } - // check if we have net.Listener embedded. Its a workaround to support - // crypto/tls Listen - if ls := v.FieldByName("Listener"); ls.IsValid() { - for hasElem(ls) { - ls = ls.Elem() - } - v = ls - } - if v.Kind() != reflect.Struct { - return fmt.Errorf("Not supported by current Go version") - } - v = v.FieldByName("fd") - if !v.IsValid() { - return fmt.Errorf("Not supported by current Go version") - } - v = v.Elem() - fdField := v.FieldByName("sysfd") - if !fdField.IsValid() { - fdField = v.FieldByName("pfd").FieldByName("Sysfd") - } - - if !fdField.IsValid() { - return fmt.Errorf("Not supported by current Go version") - } - fd := uintptr(fdField.Int()) - a.services.Store(name, &Service{ - Name: name, - FdName: ListerName(ls), - Listener: ls, - Descriptor: fd, - }) - return nil -} - -func (a Again) Get(name string) *Service { - s, _ := a.services.Load(name) - if s != nil { - return s.(*Service) - } - return nil -} - -func (a Again) Delete(name string) { - a.services.Delete(name) -} - -func (a Again) GetListener(key string) net.Listener { - if s := a.Get(key); s != nil { - return s.Listener - } - return nil -} - -// Re-exec this same image without dropping the net.Listener. 
-func Exec(a *Again) error { - var pid int - fmt.Sscan(os.Getenv("GOAGAIN_PID"), &pid) - if syscall.Getppid() == pid { - return fmt.Errorf("goagain.Exec called by a child process") - } - argv0, err := lookPath() - if nil != err { - return err - } - if err := setEnvs(a); nil != err { - return err - } - if err := os.Setenv( - "GOAGAIN_SIGNAL", - fmt.Sprintf("%d", syscall.SIGQUIT), - ); nil != err { - return err - } - log.Println("re-executing", argv0) - return syscall.Exec(argv0, os.Args, os.Environ()) -} - -// Fork and exec this same image without dropping the net.Listener. -func ForkExec(a *Again) error { - argv0, err := lookPath() - if nil != err { - return err - } - wd, err := os.Getwd() - if nil != err { - return err - } - err = setEnvs(a) - if nil != err { - return err - } - if err := os.Setenv("GOAGAIN_PID", ""); nil != err { - return err - } - if err := os.Setenv( - "GOAGAIN_PPID", - fmt.Sprint(syscall.Getpid()), - ); nil != err { - return err - } - - sig := syscall.SIGQUIT - if err := os.Setenv("GOAGAIN_SIGNAL", fmt.Sprintf("%d", sig)); nil != err { - return err - } - - files := []*os.File{ - os.Stdin, os.Stdout, os.Stderr, - } - a.Range(func(s *Service) { - files = append(files, os.NewFile( - s.Descriptor, - ListerName(s.Listener), - )) - }) - p, err := os.StartProcess(argv0, os.Args, &os.ProcAttr{ - Dir: wd, - Env: os.Environ(), - Files: files, - Sys: &syscall.SysProcAttr{}, - }) - if nil != err { - return err - } - log.Println("spawned child", p.Pid) - if err = os.Setenv("GOAGAIN_PID", fmt.Sprint(p.Pid)); nil != err { - return err - } - return nil -} - -// IsErrClosing tests whether an error is equivalent to net.errClosing as returned by -// Accept during a graceful exit. -func IsErrClosing(err error) bool { - if opErr, ok := err.(*net.OpError); ok { - err = opErr.Err - } - return "use of closed network connection" == err.Error() -} - -// Child returns true if this process is managed by again and its a child -// process. -func Child() bool { - d := os.Getenv("GOAGAIN_PID") - if d == "" { - d = os.Getenv("GOAGAIN_PPID") - } - var pid int - _, err := fmt.Sscan(d, &pid) - return err == nil -} - -// Kill process specified in the environment with the signal specified in the -// environment; default to SIGQUIT. -func Kill() error { - var ( - pid int - sig syscall.Signal - ) - _, err := fmt.Sscan(os.Getenv("GOAGAIN_PID"), &pid) - if io.EOF == err { - _, err = fmt.Sscan(os.Getenv("GOAGAIN_PPID"), &pid) - } - if nil != err { - return err - } - if _, err := fmt.Sscan(os.Getenv("GOAGAIN_SIGNAL"), &sig); nil != err { - sig = syscall.SIGQUIT - } - log.Println("sending signal", sig, "to process", pid) - return syscall.Kill(pid, sig) -} - -// Listen checks env and constructs a Again instance if this is a child process -// that was froked by again parent. -// -// forkHook if provided will be called before forking. 
-func Listen(forkHook func()) (*Again, error) { - a := New() - if err := ListenFrom(&a, forkHook); err != nil { - return nil, err - } - return &a, nil -} - -func ListenFrom(a *Again, forkHook func()) error { - OnForkHook = forkHook - fds := strings.Split(os.Getenv("GOAGAIN_FD"), ",") - names := strings.Split(os.Getenv("GOAGAIN_SERVICE_NAME"), ",") - fdNames := strings.Split(os.Getenv("GOAGAIN_NAME"), ",") - if !((len(fds) == len(names)) && (len(fds) == len(fdNames))) { - errors.New(("again: names/fds mismatch")) - } - for k, f := range fds { - if f == "" { - continue - } - var s Service - _, err := fmt.Sscan(f, &s.Descriptor) - if err != nil { - return err - } - s.Name = names[k] - s.FdName = fdNames[k] - l, err := net.FileListener(os.NewFile(s.Descriptor, s.FdName)) - if err != nil { - return err - } - s.Listener = l - switch l.(type) { - case *net.TCPListener, *net.UnixListener: - default: - return fmt.Errorf( - "file descriptor is %T not *net.TCPListener or *net.UnixListener", - l, - ) - } - if err = syscall.Close(int(s.Descriptor)); nil != err { - return err - } - fmt.Println("=> ", s.Name, s.FdName) - a.services.Store(s.Name, &s) - } - return nil -} - -// Wait waits for signals -func Wait(a *Again) (syscall.Signal, error) { - ch := make(chan os.Signal, 2) - signal.Notify( - ch, - syscall.SIGHUP, - syscall.SIGINT, - syscall.SIGQUIT, - syscall.SIGTERM, - syscall.SIGUSR1, - syscall.SIGUSR2, - ) - forked := false - for { - sig := <-ch - log.Println(sig.String()) - switch sig { - - // SIGHUP should reload configuration. - case syscall.SIGHUP: - if a.Hooks.OnSIGHUP != nil { - if err := a.Hooks.OnSIGHUP(a); err != nil { - log.Println("OnSIGHUP:", err) - } - } - - // SIGINT should exit. - case syscall.SIGINT: - return syscall.SIGINT, nil - - // SIGQUIT should exit gracefully. - case syscall.SIGQUIT: - if a.Hooks.OnSIGQUIT != nil { - if err := a.Hooks.OnSIGQUIT(a); err != nil { - log.Println("OnSIGQUIT:", err) - } - } - return syscall.SIGQUIT, nil - - // SIGTERM should exit. - case syscall.SIGTERM: - if a.Hooks.OnSIGTERM != nil { - if err := a.Hooks.OnSIGHUP(a); err != nil { - log.Println("OnSIGTERM:", err) - } - } - return syscall.SIGTERM, nil - - // SIGUSR1 should reopen logs. - case syscall.SIGUSR1: - if a.Hooks.OnSIGUSR1 != nil { - if err := a.Hooks.OnSIGUSR1(a); err != nil { - log.Println("OnSIGUSR1:", err) - } - } - - // SIGUSR2 forks and re-execs the first time it is received and execs - // without forking from then on. 
- case syscall.SIGUSR2: - if OnForkHook != nil { - OnForkHook() - } - if forked { - return syscall.SIGUSR2, nil - } - forked = true - if err := ForkExec(a); nil != err { - return syscall.SIGUSR2, err - } - - } - } -} - -func lookPath() (argv0 string, err error) { - argv0, err = exec.LookPath(os.Args[0]) - if nil != err { - return - } - if _, err = os.Stat(argv0); nil != err { - return - } - return -} - -func setEnvs(a *Again) error { - e, err := a.Env() - if err != nil { - return err - } - for k, v := range e { - os.Setenv(k, v) - } - return nil -} diff --git a/vendor/github.com/TykTechnologies/again/go.mod b/vendor/github.com/TykTechnologies/again/go.mod deleted file mode 100644 index 9d1beb7b532..00000000000 --- a/vendor/github.com/TykTechnologies/again/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/TykTechnologies/again - -go 1.12 diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/.gitignore b/vendor/github.com/TykTechnologies/circuitbreaker/.gitignore deleted file mode 100644 index 9ed3b07cefe..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.test diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/CHANGELOG.md b/vendor/github.com/TykTechnologies/circuitbreaker/CHANGELOG.md deleted file mode 100644 index 6dbb18990ab..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/CHANGELOG.md +++ /dev/null @@ -1,230 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -## 2.2.0 - 2016-08-09 - -### Added -- Externally provided event listener channel (@spencerkimball) - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Reduce allocations around last failure time storage -- Use the Clock for window code as well -- Remove test data race -- Fix race condition in `state()` (@tamird) - -## 2.1.7 - 2016-07-27 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Set `Backoff.MaxElapsedTime` to 0 as default [@matope] -- Use a lock when modifying `nextBackoff` -- Fix goroutine leak when using timeouts [@isaldana] -- Fix window buckets that should be empty [@isaldana] -- Update backoff package, which has been renamed - -## 2.1.6 - 2016-02-02 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- client.Do() was not returning the error when it timed out [@ryanmurf] - -## 2.1.5 - 2015-11-19 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Respect backoff.Stop [@bc-vincent-zhao] - -## 2.1.4 - 2015-09-01 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- HTTP client was using a new panel object instead of the one it added the breaker to [@ryanmurf] - -## 2.1.3 - 2015-08-05 - -### Added -- Configurable bucket time and number [@thraxil] -- Use mock clock for test [@andreas] - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Bug in statsd bucket name documentation / example [@thraxil] - -## 2.1.2 - 2015-04-03 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Simplify Call() for rate breaker, fixing a reset bug - -## 2.1.1 - 2014-10-29 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Ensure the half opens counter resets when the breaker resets, or auto-resetting may not occur - -## 2.1.0 - 2014-10-16 - -### Added -- Failure, Sucess counts and Error Rate is now calculated over a 
sliding window -- Number of buckets in the window and the time the window spans are tuneable - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- A race condition in Call() - -## 2.0.2 - 2014-10-13 - -### Added -- ResetCounters - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Nothing - -## 2.0.1 - 2014-10-13 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Error rate should return 0.0 if there have been no samples - -## 2.0.0 - 2014-10-13 - -### Added -- All circuit breakers are now a Breaker with trip semantics handled by a TripFunc -- NewConsecutiveBreaker -- NewRateBreaker -- ConsecFailures -- ErrorRate -- Success -- Successes -- Retry logic now uses cenkalti/backoff, exponential backoff by default - -### Deprecated -- Nothing - -### Removed -- TrippableBreaker, ThresholdBreaker, FrequencyBreaker, TimeoutBreaker; all handled by Breaker now -- NewFrequencyBreaker, replaced by NewConsecutiveBreaker -- NewTimeoutBreaker, time out semantics are now handled by Call() -- NoOp(), use a Breaker with no TripFunc instead - -### Fixed -- Nothing - -## 1.1.2 - 2014-08-20 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Fixed -- For a FrequencyBreaker, Failures() should return the count since the duration start, even after resetting. - -## 1.1.1 - 2014-08-20 - -### Added -- Nothing - -### Deprecated -- Nothing - -### Fixed -- Only send the reset event if the breaker was in a tripped state - -## 1.1.0 - 2014-08-16 - -### Added -- Re-export a Panels Circuits map. It's handy and if you mess it up, it's on you. - -### Deprecated -- Nothing - -### Removed -- Nothing - -### Fixed -- Nothing - -## 1.0.0 - 2014-08-16 - -### Added -- This will be the public API for version 1.0.0. This project will follow semver rules. diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/LICENSE b/vendor/github.com/TykTechnologies/circuitbreaker/LICENSE deleted file mode 100644 index 5a188a06b00..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Scott Barron - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
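Beyond the constructors shown in the README that follows, the circuitbreaker sources deleted below also ship a `Panel` type that groups breakers by name and fans their state changes out as `PanelEvent`s, which the README does not show directly. A minimal sketch of that usage, assuming the package is imported as `circuit` from `github.com/TykTechnologies/circuitbreaker` (as the deleted files declare); the upstream names are hypothetical and this is only an illustration of the removed API, not code belonging to this change:

```go
package main

import (
	"errors"
	"log"
	"time"

	circuit "github.com/TykTechnologies/circuitbreaker"
)

func main() {
	// Track one breaker per upstream, keyed by name.
	panel := circuit.NewPanel()
	panel.Add("upstream-a", circuit.NewThresholdBreaker(10)) // trip after 10 failures
	panel.Add("upstream-b", circuit.NewRateBreaker(0.5, 100)) // trip at a 50% error rate, min 100 samples

	// Panel.Subscribe delivers every breaker state change as a PanelEvent.
	events := panel.Subscribe()
	go func() {
		for e := range events {
			switch e.Event {
			case circuit.BreakerTripped:
				log.Printf("breaker %q tripped", e.Name)
			case circuit.BreakerReset:
				log.Printf("breaker %q reset", e.Name)
			}
		}
	}()

	// Wrap a call with the named breaker; exceeding the 2s timeout counts as a failure.
	cb, _ := panel.Get("upstream-a")
	err := cb.Call(func() error {
		return errors.New("upstream unavailable") // placeholder for a real remote call
	}, 2*time.Second)
	if err != nil {
		log.Println("call failed:", err)
	}
}
```

Note that `Panel.Get` returns a fresh breaker with no `TripFunc` (and `ok == false`) when the name is unknown, so lookups never yield `nil`.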
diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/README.md b/vendor/github.com/TykTechnologies/circuitbreaker/README.md deleted file mode 100644 index 1e08423b585..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/README.md +++ /dev/null @@ -1,120 +0,0 @@ -# circuitbreaker - -Circuitbreaker provides an easy way to use the Circuit Breaker pattern in a -Go program. - -Circuit breakers are typically used when your program makes remote calls. -Remote calls can often hang for a while before they time out. If your -application makes a lot of these requests, many resources can be tied -up waiting for these time outs to occur. A circuit breaker wraps these -remote calls and will trip after a defined amount of failures or time outs -occur. When a circuit breaker is tripped any future calls will avoid making -the remote call and return an error to the caller. In the meantime, the -circuit breaker will periodically allow some calls to be tried again and -will close the circuit if those are successful. - -You can read more about this pattern and how it's used at: -- [Martin Fowler's bliki](http://martinfowler.com/bliki/CircuitBreaker.html) -- [The Netflix Tech Blog](http://techblog.netflix.com/2012/02/fault-tolerance-in-high-volume.html) -- [Release It!](http://pragprog.com/book/mnee/release-it) - -[![GoDoc](https://godoc.org/github.com/rubyist/circuitbreaker?status.svg)](https://godoc.org/github.com/rubyist/circuitbreaker) - -## Installation - -``` - go get github.com/rubyist/circuitbreaker -``` - -## Examples - -Here is a quick example of what circuitbreaker provides - -```go -// Creates a circuit breaker that will trip if the function fails 10 times -cb := circuit.NewThresholdBreaker(10) - -events := cb.Subscribe() -go func() { - for { - e := <-events - // Monitor breaker events like BreakerTripped, BreakerReset, BreakerFail, BreakerReady - } -}() - -cb.Call(func() error { - // This is where you'll do some remote call - // If it fails, return an error -}, 0) -``` - -Circuitbreaker can also wrap a time out around the remote call. - -```go -// Creates a circuit breaker that will trip after 10 failures -// using a time out of 5 seconds -cb := circuit.NewThresholdBreaker(10) - -cb.Call(func() error { - // This is where you'll do some remote call - // If it fails, return an error -}, time.Second * 5) // This will time out after 5 seconds, which counts as a failure - -// Proceed as above - -``` - -Circuitbreaker can also trip based on the number of consecutive failures. - -```go -// Creates a circuit breaker that will trip if 10 consecutive failures occur -cb := circuit.NewConsecutiveBreaker(10) - -// Proceed as above -``` - -Circuitbreaker can trip based on the error rate. - -```go -// Creates a circuit breaker based on the error rate -cb := circuit.NewRateBreaker(0.95, 100) // trip when error rate hits 95%, with at least 100 samples - -// Proceed as above -``` - -If it doesn't make sense to wrap logic in Call(), breakers can be handled manually. - -```go -cb := circuit.NewThresholdBreaker(10) - -for { - if cb.Ready() { - // Breaker is not tripped, proceed - err := doSomething() - if err != nil { - cb.Fail() // This will trip the breaker once it's failed 10 times - continue - } - cb.Success() - } else { - // Breaker is in a tripped state. - } -} -``` - -Circuitbreaker also provides a wrapper around `http.Client` that will wrap a -time out around any request. - -```go -// Passing in nil will create a regular http.Client. 
-// You can also build your own http.Client and pass it in -client := circuit.NewHTTPClient(time.Second * 5, 10, nil) - -resp, err := client.Get("http://example.com/resource.json") -``` - -See the godoc for more examples. - -## Bugs, Issues, Feedback - -Right here on GitHub: [https://github.com/rubyist/circuitbreaker](https://github.com/rubyist/circuitbreaker) diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/circuitbreaker.go b/vendor/github.com/TykTechnologies/circuitbreaker/circuitbreaker.go deleted file mode 100644 index 2bb8a2494f8..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/circuitbreaker.go +++ /dev/null @@ -1,460 +0,0 @@ -// Package circuit implements the Circuit Breaker pattern. It will wrap -// a function call (typically one which uses remote services) and monitors for -// failures and/or time outs. When a threshold of failures or time outs has been -// reached, future calls to the function will not run. During this state, the -// breaker will periodically allow the function to run and, if it is successful, -// will start running the function again. -// -// Circuit includes three types of circuit breakers: -// -// A Threshold Breaker will trip when the failure count reaches a given threshold. -// It does not matter how long it takes to reach the threshold and the failures do -// not need to be consecutive. -// -// A Consecutive Breaker will trip when the consecutive failure count reaches a given -// threshold. It does not matter how long it takes to reach the threshold, but the -// failures do need to be consecutive. -// -// -// When wrapping blocks of code with a Breaker's Call() function, a time out can be -// specified. If the time out is reached, the breaker's Fail() function will be called. -// -// -// Other types of circuit breakers can be easily built by creating a Breaker and -// adding a custom TripFunc. A TripFunc is called when a Breaker Fail()s and receives -// the breaker as an argument. It then returns true or false to indicate whether the -// breaker should trip. -// -// The package also provides a wrapper around an http.Client that wraps all of -// the http.Client functions with a Breaker. -// -package circuit - -import ( - "context" - "errors" - "sync" - "sync/atomic" - "time" - - "github.com/cenk/backoff" - "github.com/facebookgo/clock" -) - -// BreakerEvent indicates the type of event received over an event channel -type BreakerEvent int - -const ( - // BreakerTripped is sent when a breaker trips - BreakerTripped BreakerEvent = iota - - // BreakerReset is sent when a breaker resets - BreakerReset - - // BreakerFail is sent when Fail() is called - BreakerFail - - // BreakerReady is sent when the breaker enters the half open state and is ready to retry - BreakerReady - - // stops breaker's subscribers - BreakerStop -) - -// ListenerEvent includes a reference to the circuit breaker and the event. -type ListenerEvent struct { - CB *Breaker - Event BreakerEvent -} - -type state int - -const ( - open state = iota - halfopen state = iota - closed state = iota -) - -var ( - defaultInitialBackOffInterval = 500 * time.Millisecond - defaultBackoffMaxElapsedTime = 0 * time.Second -) - -// Error codes returned by Call -var ( - ErrBreakerOpen = errors.New("breaker open") - ErrBreakerTimeout = errors.New("breaker time out") -) - -// TripFunc is a function called by a Breaker's Fail() function and determines whether -// the breaker should trip. It will receive the Breaker as an argument and returns a -// boolean. 
By default, a Breaker has no TripFunc. -type TripFunc func(*Breaker) bool - -// Breaker is the base of a circuit breaker. It maintains failure and success counters -// as well as the event subscribers. -type Breaker struct { - // BackOff is the backoff policy that is used when determining if the breaker should - // attempt to retry. A breaker created with NewBreaker will use an exponential backoff - // policy by default. - BackOff backoff.BackOff - - // ShouldTrip is a TripFunc that determines whether a Fail() call should trip the breaker. - // A breaker created with NewBreaker will not have a ShouldTrip by default, and thus will - // never automatically trip. - ShouldTrip TripFunc - - // Clock is used for controlling time in tests. - Clock clock.Clock - - _ [4]byte // pad to fix golang issue #599 - consecFailures int64 - lastFailure int64 // stored as nanoseconds since the Unix epoch - halfOpens int64 - counts *window - nextBackOff time.Duration - tripped int32 - broken int32 - stopped int32 - eventReceivers []chan BreakerEvent - listeners []chan ListenerEvent - backoffLock sync.Mutex -} - -// Options holds breaker configuration options. -type Options struct { - BackOff backoff.BackOff - Clock clock.Clock - ShouldTrip TripFunc - WindowTime time.Duration - WindowBuckets int -} - -// NewBreakerWithOptions creates a base breaker with a specified backoff, clock and TripFunc -func NewBreakerWithOptions(options *Options) *Breaker { - if options == nil { - options = &Options{} - } - - if options.Clock == nil { - options.Clock = clock.New() - } - - if options.BackOff == nil { - b := backoff.NewExponentialBackOff() - b.InitialInterval = defaultInitialBackOffInterval - b.MaxElapsedTime = defaultBackoffMaxElapsedTime - b.Clock = options.Clock - b.Reset() - options.BackOff = b - } - - if options.WindowTime == 0 { - options.WindowTime = DefaultWindowTime - } - - if options.WindowBuckets == 0 { - options.WindowBuckets = DefaultWindowBuckets - } - - return &Breaker{ - BackOff: options.BackOff, - Clock: options.Clock, - ShouldTrip: options.ShouldTrip, - nextBackOff: options.BackOff.NextBackOff(), - counts: newWindow(options.WindowTime, options.WindowBuckets), - } -} - -// NewBreaker creates a base breaker with an exponential backoff and no TripFunc -func NewBreaker() *Breaker { - return NewBreakerWithOptions(nil) -} - -// NewThresholdBreaker creates a Breaker with a ThresholdTripFunc. -func NewThresholdBreaker(threshold int64) *Breaker { - return NewBreakerWithOptions(&Options{ - ShouldTrip: ThresholdTripFunc(threshold), - }) -} - -// NewConsecutiveBreaker creates a Breaker with a ConsecutiveTripFunc. -func NewConsecutiveBreaker(threshold int64) *Breaker { - return NewBreakerWithOptions(&Options{ - ShouldTrip: ConsecutiveTripFunc(threshold), - }) -} - -// NewRateBreaker creates a Breaker with a RateTripFunc. -func NewRateBreaker(rate float64, minSamples int64) *Breaker { - return NewBreakerWithOptions(&Options{ - ShouldTrip: RateTripFunc(rate, minSamples), - }) -} - -// Subscribe returns a channel of BreakerEvents. Whenever the breaker changes state, -// the state will be sent over the channel. See BreakerEvent for the types of events. 
-func (cb *Breaker) Subscribe() <-chan BreakerEvent { - eventReader := make(chan BreakerEvent) - output := make(chan BreakerEvent, 100) - - go func() { - for v := range eventReader { - select { - case output <- v: - // stop subscriber Go-routine if CB was asked to stop - if v == BreakerStop { - return - } - default: - <-output - output <- v - } - } - }() - cb.eventReceivers = append(cb.eventReceivers, eventReader) - return output -} - -// AddListener adds a channel of ListenerEvents on behalf of a listener. -// The listener channel must be buffered. -func (cb *Breaker) AddListener(listener chan ListenerEvent) { - cb.listeners = append(cb.listeners, listener) -} - -// RemoveListener removes a channel previously added via AddListener. -// Once removed, the channel will no longer receive ListenerEvents. -// Returns true if the listener was found and removed. -func (cb *Breaker) RemoveListener(listener chan ListenerEvent) bool { - for i, receiver := range cb.listeners { - if listener == receiver { - cb.listeners = append(cb.listeners[:i], cb.listeners[i+1:]...) - return true - } - } - return false -} - -// Trip will trip the circuit breaker. After Trip() is called, Tripped() will -// return true. -func (cb *Breaker) Trip() { - atomic.StoreInt32(&cb.tripped, 1) - now := cb.Clock.Now() - atomic.StoreInt64(&cb.lastFailure, now.UnixNano()) - cb.sendEvent(BreakerTripped) -} - -// Reset will reset the circuit breaker. After Reset() is called, Tripped() will -// return false. -func (cb *Breaker) Reset() { - atomic.StoreInt32(&cb.broken, 0) - atomic.StoreInt32(&cb.tripped, 0) - atomic.StoreInt64(&cb.halfOpens, 0) - cb.ResetCounters() - cb.sendEvent(BreakerReset) -} - -// ResetCounters will reset only the failures, consecFailures, and success counters -func (cb *Breaker) ResetCounters() { - atomic.StoreInt64(&cb.consecFailures, 0) - cb.counts.Reset() -} - -// Tripped returns true if the circuit breaker is tripped, false if it is reset. -func (cb *Breaker) Tripped() bool { - return atomic.LoadInt32(&cb.tripped) == 1 -} - -// Break trips the circuit breaker and prevents it from auto resetting. Use this when -// manual control over the circuit breaker state is needed. -func (cb *Breaker) Break() { - atomic.StoreInt32(&cb.broken, 1) - cb.Trip() -} - -// Stop stops all go-routines to process events -func (cb *Breaker) Stop() { - atomic.StoreInt32(&cb.stopped, 1) - cb.sendEvent(BreakerStop) -} - -// Failures returns the number of failures for this circuit breaker. -func (cb *Breaker) Failures() int64 { - return cb.counts.Failures() -} - -// ConsecFailures returns the number of consecutive failures that have occured. -func (cb *Breaker) ConsecFailures() int64 { - return atomic.LoadInt64(&cb.consecFailures) -} - -// Successes returns the number of successes for this circuit breaker. -func (cb *Breaker) Successes() int64 { - return cb.counts.Successes() -} - -// Fail is used to indicate a failure condition the Breaker should record. It will -// increment the failure counters and store the time of the last failure. If the -// breaker has a TripFunc it will be called, tripping the breaker if necessary. -func (cb *Breaker) Fail() { - cb.counts.Fail() - atomic.AddInt64(&cb.consecFailures, 1) - now := cb.Clock.Now() - atomic.StoreInt64(&cb.lastFailure, now.UnixNano()) - cb.sendEvent(BreakerFail) - if cb.ShouldTrip != nil && cb.ShouldTrip(cb) { - cb.Trip() - } -} - -// Success is used to indicate a success condition the Breaker should record. 
If -// the success was triggered by a retry attempt, the breaker will be Reset(). -func (cb *Breaker) Success() { - cb.backoffLock.Lock() - cb.BackOff.Reset() - cb.nextBackOff = cb.BackOff.NextBackOff() - cb.backoffLock.Unlock() - - state := cb.state() - if state == halfopen { - cb.Reset() - } - atomic.StoreInt64(&cb.consecFailures, 0) - cb.counts.Success() -} - -// ErrorRate returns the current error rate of the Breaker, expressed as a floating -// point number (e.g. 0.9 for 90%), since the last time the breaker was Reset. -func (cb *Breaker) ErrorRate() float64 { - return cb.counts.ErrorRate() -} - -// Ready will return true if the circuit breaker is ready to call the function. -// It will be ready if the breaker is in a reset state, or if it is time to retry -// the call for auto resetting. -func (cb *Breaker) Ready() bool { - state := cb.state() - if state == halfopen { - atomic.StoreInt64(&cb.halfOpens, 0) - cb.sendEvent(BreakerReady) - } - return state == closed || state == halfopen -} - -// Call wraps a function the Breaker will protect. A failure is recorded -// whenever the function returns an error. If the called function takes longer -// than timeout to run, a failure will be recorded. -func (cb *Breaker) Call(circuit func() error, timeout time.Duration) error { - return cb.CallContext(context.Background(), circuit, timeout) -} - -// CallContext is same as Call but if the ctx is canceled after the circuit returned an error, -// the error will not be marked as a failure because the call was canceled intentionally. -func (cb *Breaker) CallContext(ctx context.Context, circuit func() error, timeout time.Duration) error { - var err error - - if !cb.Ready() { - return ErrBreakerOpen - } - - if timeout == 0 { - err = circuit() - } else { - c := make(chan error, 1) - go func() { - c <- circuit() - close(c) - }() - - select { - case e := <-c: - err = e - case <-cb.Clock.After(timeout): - err = ErrBreakerTimeout - } - } - - if err != nil { - if ctx.Err() != context.Canceled { - cb.Fail() - } - return err - } - - cb.Success() - return nil -} - -// state returns the state of the TrippableBreaker. The states available are: -// closed - the circuit is in a reset state and is operational -// open - the circuit is in a tripped state -// halfopen - the circuit is in a tripped state but the reset timeout has passed -func (cb *Breaker) state() state { - tripped := cb.Tripped() - if tripped { - if atomic.LoadInt32(&cb.broken) == 1 { - return open - } - - last := atomic.LoadInt64(&cb.lastFailure) - since := cb.Clock.Now().Sub(time.Unix(0, last)) - - cb.backoffLock.Lock() - defer cb.backoffLock.Unlock() - - if cb.nextBackOff != backoff.Stop && since > cb.nextBackOff { - if atomic.CompareAndSwapInt64(&cb.halfOpens, 0, 1) { - cb.nextBackOff = cb.BackOff.NextBackOff() - return halfopen - } - return open - } - return open - } - return closed -} - -func (cb *Breaker) sendEvent(event BreakerEvent) { - for _, receiver := range cb.eventReceivers { - receiver <- event - } - for _, listener := range cb.listeners { - le := ListenerEvent{CB: cb, Event: event} - select { - case listener <- le: - default: - <-listener - listener <- le - } - } -} - -// ThresholdTripFunc returns a TripFunc with that trips whenever -// the failure count meets the threshold. -func ThresholdTripFunc(threshold int64) TripFunc { - return func(cb *Breaker) bool { - return cb.Failures() == threshold - } -} - -// ConsecutiveTripFunc returns a TripFunc that trips whenever -// the consecutive failure count meets the threshold. 
-func ConsecutiveTripFunc(threshold int64) TripFunc { - return func(cb *Breaker) bool { - return cb.ConsecFailures() == threshold - } -} - -// RateTripFunc returns a TripFunc that trips whenever the -// error rate hits the threshold. The error rate is calculated as such: -// f = number of failures -// s = number of successes -// e = f / (f + s) -// The error rate is calculated over a sliding window of 10 seconds (by default) -// This TripFunc will not trip until there have been at least minSamples events. -func RateTripFunc(rate float64, minSamples int64) TripFunc { - return func(cb *Breaker) bool { - samples := cb.Failures() + cb.Successes() - return samples >= minSamples && cb.ErrorRate() >= rate - } -} diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/client.go b/vendor/github.com/TykTechnologies/circuitbreaker/client.go deleted file mode 100644 index e91c52b26ca..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/client.go +++ /dev/null @@ -1,170 +0,0 @@ -package circuit - -import ( - "io" - "net/http" - "net/url" - "time" -) - -// HTTPClient is a wrapper around http.Client that provides circuit breaker capabilities. -// -// By default, the client will use its defaultBreaker. A BreakerLookup function may be -// provided to allow different breakers to be used based on the circumstance. See the -// implementation of NewHostBasedHTTPClient for an example of this. -type HTTPClient struct { - Client *http.Client - BreakerTripped func() - BreakerReset func() - BreakerLookup func(*HTTPClient, interface{}) *Breaker - Panel *Panel - timeout time.Duration -} - -var defaultBreakerName = "_default" - -// NewHTTPClient provides a circuit breaker wrapper around http.Client. -// It wraps all of the regular http.Client functions. Specifying 0 for timeout will -// give a breaker that does not check for time outs. -func NewHTTPClient(timeout time.Duration, threshold int64, client *http.Client) *HTTPClient { - breaker := NewThresholdBreaker(threshold) - return NewHTTPClientWithBreaker(breaker, timeout, client) -} - -// NewHostBasedHTTPClient provides a circuit breaker wrapper around http.Client. This -// client will use one circuit breaker per host parsed from the request URL. This allows -// you to use a single HTTPClient for multiple hosts with one host's breaker not affecting -// the other hosts. -func NewHostBasedHTTPClient(timeout time.Duration, threshold int64, client *http.Client) *HTTPClient { - brclient := NewHTTPClient(timeout, threshold, client) - - brclient.BreakerLookup = func(c *HTTPClient, val interface{}) *Breaker { - rawURL := val.(string) - parsedURL, err := url.Parse(rawURL) - if err != nil { - breaker, _ := c.Panel.Get(defaultBreakerName) - return breaker - } - host := parsedURL.Host - - cb, ok := c.Panel.Get(host) - if !ok { - cb = NewThresholdBreaker(threshold) - c.Panel.Add(host, cb) - } - - return cb - } - - return brclient -} - -// NewHTTPClientWithBreaker provides a circuit breaker wrapper around http.Client. -// It wraps all of the regular http.Client functions using the provided Breaker. 
-func NewHTTPClientWithBreaker(breaker *Breaker, timeout time.Duration, client *http.Client) *HTTPClient { - if client == nil { - client = &http.Client{} - } - - panel := NewPanel() - panel.Add(defaultBreakerName, breaker) - - brclient := &HTTPClient{Client: client, Panel: panel, timeout: timeout} - brclient.BreakerLookup = func(c *HTTPClient, val interface{}) *Breaker { - cb, _ := c.Panel.Get(defaultBreakerName) - return cb - } - - events := breaker.Subscribe() - go func() { - event := <-events - switch event { - case BreakerTripped: - brclient.runBreakerTripped() - case BreakerReset: - brclient.runBreakerReset() - } - }() - - return brclient -} - -// Do wraps http.Client Do() -func (c *HTTPClient) Do(req *http.Request) (*http.Response, error) { - var resp *http.Response - var err error - breaker := c.breakerLookup(req.URL.String()) - err = breaker.Call(func() error { - resp, err = c.Client.Do(req) - return err - }, c.timeout) - return resp, err -} - -// Get wraps http.Client Get() -func (c *HTTPClient) Get(url string) (*http.Response, error) { - var resp *http.Response - breaker := c.breakerLookup(url) - err := breaker.Call(func() error { - aresp, err := c.Client.Get(url) - resp = aresp - return err - }, c.timeout) - return resp, err -} - -// Head wraps http.Client Head() -func (c *HTTPClient) Head(url string) (*http.Response, error) { - var resp *http.Response - breaker := c.breakerLookup(url) - err := breaker.Call(func() error { - aresp, err := c.Client.Head(url) - resp = aresp - return err - }, c.timeout) - return resp, err -} - -// Post wraps http.Client Post() -func (c *HTTPClient) Post(url string, bodyType string, body io.Reader) (*http.Response, error) { - var resp *http.Response - breaker := c.breakerLookup(url) - err := breaker.Call(func() error { - aresp, err := c.Client.Post(url, bodyType, body) - resp = aresp - return err - }, c.timeout) - return resp, err -} - -// PostForm wraps http.Client PostForm() -func (c *HTTPClient) PostForm(url string, data url.Values) (*http.Response, error) { - var resp *http.Response - breaker := c.breakerLookup(url) - err := breaker.Call(func() error { - aresp, err := c.Client.PostForm(url, data) - resp = aresp - return err - }, c.timeout) - return resp, err -} - -func (c *HTTPClient) breakerLookup(val interface{}) *Breaker { - if c.BreakerLookup != nil { - return c.BreakerLookup(c, val) - } - cb, _ := c.Panel.Get(defaultBreakerName) - return cb -} - -func (c *HTTPClient) runBreakerTripped() { - if c.BreakerTripped != nil { - c.BreakerTripped() - } -} - -func (c *HTTPClient) runBreakerReset() { - if c.BreakerReset != nil { - c.BreakerReset() - } -} diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/panel.go b/vendor/github.com/TykTechnologies/circuitbreaker/panel.go deleted file mode 100644 index cd4e7b6c7ee..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/panel.go +++ /dev/null @@ -1,144 +0,0 @@ -package circuit - -import ( - "fmt" - "sync" - "time" -) - -var defaultStatsPrefixf = "circuit.%s" - -// Statter interface provides a way to gather statistics from breakers -type Statter interface { - Counter(sampleRate float32, bucket string, n ...int) - Timing(sampleRate float32, bucket string, d ...time.Duration) - Gauge(sampleRate float32, bucket string, value ...string) -} - -// PanelEvent wraps a BreakerEvent and provides the string name of the breaker -type PanelEvent struct { - Name string - Event BreakerEvent -} - -// Panel tracks a group of circuit breakers by name. 
-type Panel struct { - Statter Statter - StatsPrefixf string - - Circuits map[string]*Breaker - - lastTripTimes map[string]time.Time - tripTimesLock sync.RWMutex - panelLock sync.RWMutex - eventReceivers []chan PanelEvent -} - -// NewPanel creates a new Panel -func NewPanel() *Panel { - return &Panel{ - Circuits: make(map[string]*Breaker), - Statter: &noopStatter{}, - StatsPrefixf: defaultStatsPrefixf, - lastTripTimes: make(map[string]time.Time)} -} - -// Add sets the name as a reference to the given circuit breaker. -func (p *Panel) Add(name string, cb *Breaker) { - p.panelLock.Lock() - p.Circuits[name] = cb - p.panelLock.Unlock() - - events := cb.Subscribe() - - go func() { - for event := range events { - for _, receiver := range p.eventReceivers { - receiver <- PanelEvent{name, event} - } - switch event { - case BreakerTripped: - p.breakerTripped(name) - case BreakerReset: - p.breakerReset(name) - case BreakerFail: - p.breakerFail(name) - case BreakerReady: - p.breakerReady(name) - } - } - }() -} - -// Get retrieves a circuit breaker by name. If no circuit breaker exists, it -// returns the NoOp one and sets ok to false. -func (p *Panel) Get(name string) (*Breaker, bool) { - p.panelLock.RLock() - cb, ok := p.Circuits[name] - p.panelLock.RUnlock() - - if ok { - return cb, ok - } - - return NewBreaker(), ok -} - -// Subscribe returns a channel of PanelEvents. Whenever a breaker changes state, -// the PanelEvent will be sent over the channel. See BreakerEvent for the types of events. -func (p *Panel) Subscribe() <-chan PanelEvent { - eventReader := make(chan PanelEvent) - output := make(chan PanelEvent, 100) - - go func() { - for v := range eventReader { - select { - case output <- v: - default: - <-output - output <- v - } - } - }() - p.eventReceivers = append(p.eventReceivers, eventReader) - return output -} - -func (p *Panel) breakerTripped(name string) { - p.Statter.Counter(1.0, fmt.Sprintf(p.StatsPrefixf, name)+".tripped", 1) - p.tripTimesLock.Lock() - p.lastTripTimes[name] = time.Now() - p.tripTimesLock.Unlock() -} - -func (p *Panel) breakerReset(name string) { - bucket := fmt.Sprintf(p.StatsPrefixf, name) - - p.Statter.Counter(1.0, bucket+".reset", 1) - - p.tripTimesLock.RLock() - lastTrip := p.lastTripTimes[name] - p.tripTimesLock.RUnlock() - - if !lastTrip.IsZero() { - p.Statter.Timing(1.0, bucket+".trip-time", time.Since(lastTrip)) - p.tripTimesLock.Lock() - p.lastTripTimes[name] = time.Time{} - p.tripTimesLock.Unlock() - } -} - -func (p *Panel) breakerFail(name string) { - p.Statter.Counter(1.0, fmt.Sprintf(p.StatsPrefixf, name)+".fail", 1) -} - -func (p *Panel) breakerReady(name string) { - p.Statter.Counter(1.0, fmt.Sprintf(p.StatsPrefixf, name)+".ready", 1) -} - -type noopStatter struct { -} - -func (*noopStatter) Counter(sampleRate float32, bucket string, n ...int) {} -func (*noopStatter) Timing(sampleRate float32, bucket string, d ...time.Duration) {} -func (*noopStatter) Gauge(sampleRate float32, bucket string, value ...string) {} diff --git a/vendor/github.com/TykTechnologies/circuitbreaker/window.go b/vendor/github.com/TykTechnologies/circuitbreaker/window.go deleted file mode 100644 index ab83187f6ca..00000000000 --- a/vendor/github.com/TykTechnologies/circuitbreaker/window.go +++ /dev/null @@ -1,174 +0,0 @@ -package circuit - -import ( - "container/ring" - "sync" - "time" - - "github.com/facebookgo/clock" -) - -var ( - // DefaultWindowTime is the default time the window covers, 10 seconds. 
- DefaultWindowTime = time.Millisecond * 10000 - - // DefaultWindowBuckets is the default number of buckets the window holds, 10. - DefaultWindowBuckets = 10 -) - -// bucket holds counts of failures and successes -type bucket struct { - failure int64 - success int64 -} - -// Reset resets the counts to 0 -func (b *bucket) Reset() { - b.failure = 0 - b.success = 0 -} - -// Fail increments the failure count -func (b *bucket) Fail() { - b.failure++ -} - -// Sucecss increments the success count -func (b *bucket) Success() { - b.success++ -} - -// window maintains a ring of buckets and increments the failure and success -// counts of the current bucket. Once a specified time has elapsed, it will -// advance to the next bucket, reseting its counts. This allows the keeping of -// rolling statistics on the counts. -type window struct { - buckets *ring.Ring - bucketTime time.Duration - bucketLock sync.RWMutex - lastAccess time.Time - clock clock.Clock -} - -// newWindow creates a new window. windowTime is the time covering the entire -// window. windowBuckets is the number of buckets the window is divided into. -// An example: a 10 second window with 10 buckets will have 10 buckets covering -// 1 second each. -func newWindow(windowTime time.Duration, windowBuckets int) *window { - buckets := ring.New(windowBuckets) - for i := 0; i < buckets.Len(); i++ { - buckets.Value = &bucket{} - buckets = buckets.Next() - } - - clock := clock.New() - - bucketTime := time.Duration(windowTime.Nanoseconds() / int64(windowBuckets)) - return &window{ - buckets: buckets, - bucketTime: bucketTime, - clock: clock, - lastAccess: clock.Now(), - } -} - -// Fail records a failure in the current bucket. -func (w *window) Fail() { - w.bucketLock.Lock() - b := w.getLatestBucket() - b.Fail() - w.bucketLock.Unlock() -} - -// Success records a success in the current bucket. -func (w *window) Success() { - w.bucketLock.Lock() - b := w.getLatestBucket() - b.Success() - w.bucketLock.Unlock() -} - -// Failures returns the total number of failures recorded in all buckets. -func (w *window) Failures() int64 { - w.bucketLock.RLock() - - var failures int64 - w.buckets.Do(func(x interface{}) { - b := x.(*bucket) - failures += b.failure - }) - - w.bucketLock.RUnlock() - return failures -} - -// Successes returns the total number of successes recorded in all buckets. -func (w *window) Successes() int64 { - w.bucketLock.RLock() - - var successes int64 - w.buckets.Do(func(x interface{}) { - b := x.(*bucket) - successes += b.success - }) - w.bucketLock.RUnlock() - return successes -} - -// ErrorRate returns the error rate calculated over all buckets, expressed as -// a floating point number (e.g. 0.9 for 90%) -func (w *window) ErrorRate() float64 { - var total int64 - var failures int64 - - w.bucketLock.RLock() - w.buckets.Do(func(x interface{}) { - b := x.(*bucket) - total += b.failure + b.success - failures += b.failure - }) - w.bucketLock.RUnlock() - - if total == 0 { - return 0.0 - } - - return float64(failures) / float64(total) -} - -// Reset resets the count of all buckets. -func (w *window) Reset() { - w.bucketLock.Lock() - - w.buckets.Do(func(x interface{}) { - x.(*bucket).Reset() - }) - w.bucketLock.Unlock() -} - -// getLatestBucket returns the current bucket. If the bucket time has elapsed -// it will move to the next bucket, resetting its counts and updating the last -// access time before returning it. 
getLatestBucket assumes that the caller has -// locked the bucketLock -func (w *window) getLatestBucket() *bucket { - var b *bucket - b = w.buckets.Value.(*bucket) - elapsed := w.clock.Now().Sub(w.lastAccess) - - if elapsed > w.bucketTime { - // Reset the buckets between now and number of buckets ago. If - // that is more that the existing buckets, reset all. - for i := 0; i < w.buckets.Len(); i++ { - w.buckets = w.buckets.Next() - b = w.buckets.Value.(*bucket) - b.Reset() - elapsed = time.Duration(int64(elapsed) - int64(w.bucketTime)) - if elapsed < w.bucketTime { - // Done resetting buckets. - break - } - } - w.lastAccess = w.clock.Now() - } - return b -} diff --git a/vendor/github.com/TykTechnologies/drl/drl.go b/vendor/github.com/TykTechnologies/drl/drl.go deleted file mode 100644 index f26836576fb..00000000000 --- a/vendor/github.com/TykTechnologies/drl/drl.go +++ /dev/null @@ -1,168 +0,0 @@ -package drl - -import ( - "errors" - "fmt" - "sync" - "sync/atomic" - "time" -) - -type Server struct { - HostName string - ID string - LoadPerSec int64 - Percentage float64 - TagHash string -} - -type DRL struct { - Servers *Cache - mutex sync.Mutex - serverIndex map[string]Server - ThisServerID string - CurrentTotal int64 - RequestTokenValue int - currentTokenValue int64 - Ready bool -} - -func (d *DRL) SetCurrentTokenValue(newValue int64) { - atomic.StoreInt64(&d.currentTokenValue, newValue) -} - -func (d *DRL) CurrentTokenValue() int64 { - return atomic.LoadInt64(&d.currentTokenValue) -} - -func (d *DRL) Init() { - d.Servers = NewCache(4 * time.Second) - d.RequestTokenValue = 100 - d.mutex = sync.Mutex{} - d.serverIndex = make(map[string]Server) - d.Ready = true - - go func() { - for { - d.mutex.Lock() - d.cleanServerList() - d.mutex.Unlock() - time.Sleep(5 * time.Second) - } - }() -} - -func (d *DRL) uniqueID(s Server) string { - uniqueID := s.ID + "|" + s.HostName - return uniqueID -} - -func (d *DRL) totalLoadAcrossServers() int64 { - var total int64 - for sID, _ := range d.serverIndex { - _, found := d.Servers.GetNoExtend(sID) - if found { - total += d.serverIndex[sID].LoadPerSec - } - } - - d.CurrentTotal = total - - return total -} - -func (d *DRL) cleanServerList() { - toRemove := map[string]bool{} - for sID, _ := range d.serverIndex { - _, found := d.Servers.GetNoExtend(sID) - //fmt.Printf("Checking: %v found? 
%v\n", sID, found) - if !found { - toRemove[sID] = true - } - } - - // Update the server list - for sID, _ := range toRemove { - delete(d.serverIndex, sID) - } -} - -func (d *DRL) percentagesAcrossServers() { - for sID, _ := range d.serverIndex { - _, found := d.Servers.GetNoExtend(sID) - if found { - thisServerObject := d.serverIndex[sID] - - // The compensation should be flat out based on servers, - // not on current load, it tends to skew too conservative - thisServerObject.Percentage = 1 / float64(d.Servers.Count()) - d.serverIndex[sID] = thisServerObject - } - } -} - -func (d *DRL) calculateTokenBucketValue() error { - _, found := d.Servers.Get(d.ThisServerID) - if !found { - return errors.New("Apparently this server does not exist!") - } - // Use our own index - thisServerObject := d.serverIndex[d.ThisServerID] - - var thisTokenValue float64 - thisTokenValue = float64(d.RequestTokenValue) - - if thisServerObject.Percentage > 0 { - thisTokenValue = float64(d.RequestTokenValue) / thisServerObject.Percentage - } - - rounded := Round(thisTokenValue, .5, 0) - d.SetCurrentTokenValue(int64(rounded)) - return nil -} - -func (d *DRL) AddOrUpdateServer(s Server) error { - // Add or update the cache - d.mutex.Lock() - defer d.mutex.Unlock() - - if d.uniqueID(s) != d.ThisServerID { - thisServer, found := d.Servers.GetNoExtend(d.ThisServerID) - if found { - if thisServer.TagHash != s.TagHash { - return errors.New("Node notification from different tag group, ignoring.") - } - } else { - // We don't know enough about our own host, so let's skip for now until we do - return errors.New("DRL has no information on current host, waiting...") - } - } - - if d.serverIndex != nil { - d.serverIndex[d.uniqueID(s)] = s - } - d.Servers.Set(d.uniqueID(s), s) - - // Recalculate totals - d.totalLoadAcrossServers() - - // Recalculate percentages - d.percentagesAcrossServers() - - // Get the current token bucket value: - calcErr := d.calculateTokenBucketValue() - if calcErr != nil { - return calcErr - } - - return nil -} - -func (d *DRL) Report() string { - thisServer, found := d.Servers.GetNoExtend(d.ThisServerID) - if found { - return fmt.Sprintf("[Active Nodes]: %d [Token Bucket Value]: %d [Current Load p/s]: %d [Current Load]: %f", d.CurrentTotal, d.CurrentTokenValue, thisServer.LoadPerSec, thisServer.Percentage) - } - - return "Error: server doesn't exist!" 
-} diff --git a/vendor/github.com/TykTechnologies/drl/item.go b/vendor/github.com/TykTechnologies/drl/item.go deleted file mode 100644 index 2ecf6e4f3bc..00000000000 --- a/vendor/github.com/TykTechnologies/drl/item.go +++ /dev/null @@ -1,32 +0,0 @@ -package drl - -import ( - "sync" - "time" -) - -// Item represents a record in the cache map -type Item struct { - sync.RWMutex - data Server - expires *time.Time -} - -func (item *Item) touch(duration time.Duration) { - item.Lock() - expiration := time.Now().Add(duration) - item.expires = &expiration - item.Unlock() -} - -func (item *Item) expired() bool { - var value bool - item.RLock() - if item.expires == nil { - value = true - } else { - value = item.expires.Before(time.Now()) - } - item.RUnlock() - return value -} diff --git a/vendor/github.com/TykTechnologies/drl/ttlcache.go b/vendor/github.com/TykTechnologies/drl/ttlcache.go deleted file mode 100644 index 10b77f0424b..00000000000 --- a/vendor/github.com/TykTechnologies/drl/ttlcache.go +++ /dev/null @@ -1,101 +0,0 @@ -package drl - -import ( - "sync" - "time" -) - -// Cache is a synchronised map of items that auto-expire once stale -type Cache struct { - mutex sync.RWMutex - ttl time.Duration - items map[string]*Item -} - -// Set is a thread-safe way to add new items to the map -func (cache *Cache) Set(key string, data Server) { - cache.mutex.Lock() - item := &Item{data: data} - item.touch(cache.ttl) - cache.items[key] = item - cache.mutex.Unlock() -} - -// Get is a thread-safe way to lookup items -// Every lookup, also touches the item, hence extending it's life -func (cache *Cache) Get(key string) (data Server, found bool) { - cache.mutex.Lock() - item, exists := cache.items[key] - if !exists || item.expired() { - data = Server{} - found = false - } else { - item.touch(cache.ttl) - data = item.data - found = true - } - cache.mutex.Unlock() - return -} - -// GetNoExtend is a thread-safe way to lookup items -// Every lookup, also touches the item, hence extending it's life -func (cache *Cache) GetNoExtend(key string) (data Server, found bool) { - cache.mutex.Lock() - item, exists := cache.items[key] - if !exists || item.expired() { - data = Server{} - found = false - } else { - data = item.data - found = true - } - cache.mutex.Unlock() - return -} - -// Count returns the number of items in the cache -// (helpful for tracking memory leaks) -func (cache *Cache) Count() int { - cache.mutex.RLock() - count := len(cache.items) - cache.mutex.RUnlock() - return count -} - -func (cache *Cache) cleanup() { - cache.mutex.Lock() - for key, item := range cache.items { - if item.expired() { - delete(cache.items, key) - } - } - cache.mutex.Unlock() -} - -func (cache *Cache) startCleanupTimer() { - duration := cache.ttl - if duration < time.Second { - duration = time.Second - } - ticker := time.Tick(duration) - go (func() { - for { - select { - case <-ticker: - cache.cleanup() - } - } - })() -} - -// NewCache is a helper to create instance of the Cache struct -func NewCache(duration time.Duration) *Cache { - cache := &Cache{ - ttl: duration, - items: map[string]*Item{}, - mutex: sync.RWMutex{}, - } - cache.startCleanupTimer() - return cache -} diff --git a/vendor/github.com/TykTechnologies/drl/util.go b/vendor/github.com/TykTechnologies/drl/util.go deleted file mode 100644 index 7fed7c2a507..00000000000 --- a/vendor/github.com/TykTechnologies/drl/util.go +++ /dev/null @@ -1,19 +0,0 @@ -package drl - -import ( - "math" -) - -func Round(val float64, roundOn float64, places int) (newVal float64) { - var 
round float64 - pow := math.Pow(10, float64(places)) - digit := pow * val - _, div := math.Modf(digit) - if div >= roundOn { - round = math.Ceil(digit) - } else { - round = math.Floor(digit) - } - newVal = round / pow - return -} diff --git a/vendor/github.com/TykTechnologies/goautosocket/.gitignore b/vendor/github.com/TykTechnologies/goautosocket/.gitignore deleted file mode 100644 index 6d8f95faf4d..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/.gitignore +++ /dev/null @@ -1,33 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -*.gor - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test - - -*.sublime-workspace -*.sw* -*.un* - -app.conf.json -docker.conf.json diff --git a/vendor/github.com/TykTechnologies/goautosocket/.travis.yml b/vendor/github.com/TykTechnologies/goautosocket/.travis.yml deleted file mode 100644 index 476900669cf..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go -go: - - 1.4 - - 1.4.1 - - 1.4.2 -script: - - go test -v -race diff --git a/vendor/github.com/TykTechnologies/goautosocket/LICENSE b/vendor/github.com/TykTechnologies/goautosocket/LICENSE deleted file mode 100644 index 4b6adf12453..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Clement 'cmc' Rey - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/TykTechnologies/goautosocket/README.md b/vendor/github.com/TykTechnologies/goautosocket/README.md deleted file mode 100644 index ecb321231c1..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/README.md +++ /dev/null @@ -1,167 +0,0 @@ -# GoAutoSocket (GAS) ![Status](https://img.shields.io/badge/status-stable-green.svg?style=plastic) [![Build Status](http://img.shields.io/travis/teh-cmc/goautosocket.svg?style=plastic)](https://travis-ci.org/teh-cmc/goautosocket) [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=plastic)](http://godoc.org/github.com/teh-cmc/goautosocket) - -The GAS library provides auto-reconnecting TCP sockets in a tiny, fully tested, thread-safe API. - -The `TCPClient` struct embeds a `net.TCPConn` and overrides its `Read()` and `Write()` methods, making it entirely compatible with the `net.Conn` interface and the rest of the `net` package. 
-This means you should be able to use this library by just replacing `net.Dial` with `gas.Dial` in your code. - -## Install - -```bash -get -u github.com/teh-cmc/goautosocket -``` - -## Usage - -To test the library, you can run a local TCP server with: - - $ ncat -l 9999 -k - -and run this code: - -```go -package main - -import ( - "log" - "time" - - "github.com/teh-cmc/goautosocket" -) - -func main() { - // connect to a TCP server - conn, err := gas.Dial("tcp", "localhost:9999") - if err != nil { - log.Fatal(err) - } - - // client sends "hello, world!" to the server every second - for { - _, err := conn.Write([]byte("hello, world!\n")) - if err != nil { - // if the client reached its retry limit, give up - if err == gas.ErrMaxRetries { - log.Println("client gave up, reached retry limit") - return - } - // not a GAS error, just panic - log.Fatal(err) - } - log.Println("client says hello!") - time.Sleep(time.Second) - } -} -``` - -Then try to kill and reboot your server, the client will automatically reconnect and start sending messages again; unless it has reached its retry limit. - -## Examples - -An advanced example of a client writing to a buggy server that's randomly crashing and rebooting: - -```go -package main - -import ( - "log" - "math/rand" - "net" - "sync" - "time" - - "github.com/teh-cmc/goautosocket" -) - -func main() { - rand.Seed(time.Now().UnixNano()) - - // open a server socket - s, err := net.Listen("tcp", "localhost:0") - if err != nil { - log.Fatal(err) - } - // save the original port - addr := s.Addr() - - // connect a client to the server - c, err := gas.Dial("tcp", s.Addr().String()) - if err != nil { - log.Fatal(err) - } - defer c.Close() - - // shut down and boot up the server randomly - var swg sync.WaitGroup - swg.Add(1) - go func() { - defer swg.Done() - for i := 0; i < 5; i++ { - log.Println("server up") - time.Sleep(time.Millisecond * 100 * time.Duration(rand.Intn(20))) - if err := s.Close(); err != nil { - log.Fatal(err) - } - log.Println("server down") - time.Sleep(time.Millisecond * 100 * time.Duration(rand.Intn(20))) - s, err = net.Listen("tcp", addr.String()) - if err != nil { - log.Fatal(err) - } - } - }() - - // client writes to the server and reconnects when it has to - // this is the interesting part - var cwg sync.WaitGroup - cwg.Add(1) - go func() { - defer cwg.Done() - for { - if _, err := c.Write([]byte("hello, world!\n")); err != nil { - switch e := err.(type) { - case gas.Error: - if e == gas.ErrMaxRetries { - log.Println("client leaving, reached retry limit") - return - } - default: - log.Fatal(err) - } - } - log.Println("client says hello!") - } - }() - - // terminates the server indefinitely - swg.Wait() - if err := s.Close(); err != nil { - log.Fatal(err) - } - - // wait for the client to give up - cwg.Wait() -} -``` - -You can also find an example with concurrency [here](https://github.com/teh-cmc/goautosocket/blob/master/tcp_client_test.go#L97). - -## Disclaimer - -This was built with my needs in mind, no more, no less. That is, I needed a simple, tested and thread-safe API to handle a situation in which I have: -- on one end, a lot of goroutines concurrently writing to a TCP socket -- on the other end, a TCP server that I have no control over (hence the main reason why UDP is out of the question) and which might be rebooted at anytime -I also needed the ability to give up on sending a message after an abritrary amount of tries/time (i.e., ERR_MAX_TRIES). Pretty straightforward stuff. 
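The retry limit mentioned above is configured on the `TCPClient` itself via `SetMaxRetries` and `SetRetryInterval`, defined in `tcp_client.go` further down in this diff. A minimal sketch of tuning those settings; the address and limits are illustrative only, and it assumes a listener such as the `ncat -l 9999 -k` server used in the examples above:

```go
package main

import (
	"log"
	"net"
	"time"

	gas "github.com/teh-cmc/goautosocket"
)

func main() {
	raddr, err := net.ResolveTCPAddr("tcp", "localhost:9999") // illustrative address
	if err != nil {
		log.Fatal(err)
	}

	// DialTCP returns a *gas.TCPClient, which exposes the retry settings directly.
	conn, err := gas.DialTCP("tcp", nil, raddr)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Give up after 3 retries; per the SetMaxRetries docs the sleep
	// grows as retryInterval * 2^i for retry iteration i.
	conn.SetMaxRetries(3)
	conn.SetRetryInterval(50 * time.Millisecond)

	if _, err := conn.Write([]byte("ping\n")); err == gas.ErrMaxRetries {
		log.Println("gave up after the configured retry limit")
	} else if err != nil {
		log.Fatal(err)
	}
}
```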
- -Basically, my use case is [this situation](https://github.com/teh-cmc/goautosocket/blob/master/tcp_client_test.go#L97). - -Surprisingly, I couldn't find such a library (I guess I either didn't look in the right place, or just not hard enough..? oh well); so here it is. -Do not hesitate to send a pull request if this doesn't cover all your needs (and it probably won't), they are more than welcome. - -If you're looking for some more insight, you might also want to look at [this discussion](http://redd.it/3aue82) we had on reddit. - -## License ![License](https://img.shields.io/badge/license-MIT-blue.svg?style=plastic) - -The MIT License (MIT) - see LICENSE for more details - -Copyright (c) 2015 Clement 'cmc' Rey diff --git a/vendor/github.com/TykTechnologies/goautosocket/doc.go b/vendor/github.com/TykTechnologies/goautosocket/doc.go deleted file mode 100644 index 2775775b586..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/doc.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright © 2015 Clement 'cmc' Rey . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -/* -The GAS library provides auto-reconnecting TCP sockets in a -tiny, fully tested, thread-safe API. - -The `TCPClient` struct embeds a `net.TCPConn` and overrides -its `Read()` and `Write()` methods, making it entirely compatible -with the `net.Conn` interface and the rest of the `net` package. -This means you should be able to use this library by just -replacing `net.Dial` with `gas.Dial` in your code. - -To test the library, you can run a local TCP server with: - - $ ncat -l 9999 -k - -and run this code: - - package main - - import ( - "log" - "time" - - "github.com/teh-cmc/goautosocket" - ) - - func main() { - // connect to a TCP server - conn, err := gas.Dial("tcp", "localhost:9999") - if err != nil { - log.Fatal(err) - } - - // client sends "hello, world!" to the server every second - for { - _, err := conn.Write([]byte("hello, world!\n")) - if err != nil { - // if the client reached its retry limit, give up - if err == gas.ErrMaxRetries { - log.Println("client gave up, reached retry limit") - return - } - // not a GAS error, just panic - log.Fatal(err) - } - log.Println("client says hello!") - time.Sleep(time.Second) - } - } - -Then try to kill and reboot your server, the client will automatically reconnect and start sending messages again; unless it has reached its retry limit. -*/ -package gas diff --git a/vendor/github.com/TykTechnologies/goautosocket/error.go b/vendor/github.com/TykTechnologies/goautosocket/error.go deleted file mode 100644 index 777a1e0bccc..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/error.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright © 2015 Clement 'cmc' Rey . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -package gas - -// ---------------------------------------------------------------------------- - -// Error is the error type of the GAS package. -// -// It implements the error interface. -type Error int - -const ( - // ErrMaxRetries is returned when the called function failed after the - // maximum number of allowed tries. - ErrMaxRetries Error = 0x01 -) - -// ---------------------------------------------------------------------------- - -// Error returns the error as a string. 
-func (e Error) Error() string { - switch e { - case 0x01: - return "ErrMaxRetries" - default: - return "unknown error" - } -} diff --git a/vendor/github.com/TykTechnologies/goautosocket/tcp_client.go b/vendor/github.com/TykTechnologies/goautosocket/tcp_client.go deleted file mode 100644 index d4ad43b07a6..00000000000 --- a/vendor/github.com/TykTechnologies/goautosocket/tcp_client.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright © 2015 Clement 'cmc' Rey . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -package gas - -import ( - "io" - "os" - "net" - "sync" - "syscall" - "time" -) - -// ---------------------------------------------------------------------------- - -// TCPClient provides a TCP connection with auto-reconnect capabilities. -// -// It embeds a *net.TCPConn and thus implements the net.Conn interface. -// -// Use the SetMaxRetries() and SetRetryInterval() methods to configure retry -// values; otherwise they default to maxRetries=5 and retryInterval=100ms. -// -// TCPClient can be safely used from multiple goroutines. -type TCPClient struct { - *net.TCPConn - - lock sync.RWMutex - - maxRetries int - retryInterval time.Duration -} - -// Dial returns a new net.Conn. -// -// The new client connects to the remote address `raddr` on the network `network`, -// which must be "tcp", "tcp4", or "tcp6". -// -// This complements net package's Dial function. -func Dial(network, addr string) (net.Conn, error) { - raddr, err := net.ResolveTCPAddr(network, addr) - if err != nil { - return nil, err - } - - return DialTCP(network, nil, raddr) -} - -// DialTCP returns a new *TCPClient. -// -// The new client connects to the remote address `raddr` on the network `network`, -// which must be "tcp", "tcp4", or "tcp6". -// If `laddr` is not nil, it is used as the local address for the connection. -// -// This overrides net.TCPConn's DialTCP function. -func DialTCP(network string, laddr, raddr *net.TCPAddr) (*TCPClient, error) { - conn, err := net.DialTCP(network, laddr, raddr) - if err != nil { - return nil, err - } - - return &TCPClient{ - TCPConn: conn, - - lock: sync.RWMutex{}, - - maxRetries: 10, - retryInterval: 10 * time.Millisecond, - }, nil -} - -// ---------------------------------------------------------------------------- - -// SetMaxRetries sets the retry limit for the TCPClient. -// -// Assuming i is the current retry iteration, the total sleep time is -// t = retryInterval * (2^i) -// -// This function completely Lock()s the TCPClient. -func (c *TCPClient) SetMaxRetries(maxRetries int) { - c.lock.Lock() - defer c.lock.Unlock() - - c.maxRetries = maxRetries -} - -// GetMaxRetries gets the retry limit for the TCPClient. -// -// Assuming i is the current retry iteration, the total sleep time is -// t = retryInterval * (2^i) -func (c *TCPClient) GetMaxRetries() int { - c.lock.RLock() - defer c.lock.RUnlock() - - return c.maxRetries -} - -// SetRetryInterval sets the retry interval for the TCPClient. -// -// Assuming i is the current retry iteration, the total sleep time is -// t = retryInterval * (2^i) -// -// This function completely Lock()s the TCPClient. -func (c *TCPClient) SetRetryInterval(retryInterval time.Duration) { - c.lock.Lock() - defer c.lock.Unlock() - - c.retryInterval = retryInterval -} - -// GetRetryInterval gets the retry interval for the TCPClient. 
-// -// Assuming i is the current retry iteration, the total sleep time is -// t = retryInterval * (2^i) -func (c *TCPClient) GetRetryInterval() time.Duration { - c.lock.RLock() - defer c.lock.RUnlock() - - return c.retryInterval -} - -// ---------------------------------------------------------------------------- - -// reconnect builds a new TCP connection to replace the embedded *net.TCPConn. -// -// This function completely Lock()s the TCPClient. -// -// TODO: keep old socket configuration (timeout, linger...). -func (c *TCPClient) reconnect() error { - c.lock.Lock() - defer c.lock.Unlock() - - raddr := c.TCPConn.RemoteAddr() - conn, err := net.DialTCP(raddr.Network(), nil, raddr.(*net.TCPAddr)) - if err != nil { - return err - } - - c.TCPConn.Close() - c.TCPConn = conn - return nil -} - -// ---------------------------------------------------------------------------- - -// Read wraps net.TCPConn's Read method with reconnect capabilities. -// -// It will return ErrMaxRetries if the retry limit is reached. -func (c *TCPClient) Read(b []byte) (int, error) { - c.lock.RLock() - defer c.lock.RUnlock() - - disconnected := false - - t := c.retryInterval - for i := 0; i < c.maxRetries; i++ { - if disconnected { - time.Sleep(t) - t *= 2 - c.lock.RUnlock() - if err := c.reconnect(); err != nil { - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNREFUSED { - disconnected = true - c.lock.RLock() - continue - } - return -1, err - default: - return -1, err - } - } else { - disconnected = false - } - c.lock.RLock() - } - n, err := c.TCPConn.Read(b) - if err == nil { - return n, err - } - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNRESET || - errno(e.Err) == syscall.EPIPE { - disconnected = true - } else { - return n, err - } - default: - if err.Error() == "EOF" { - disconnected = true - } else { - return n, err - } - } - t *= 2 - } - - return -1, ErrMaxRetries -} - -// ReadFrom wraps net.TCPConn's ReadFrom method with reconnect capabilities. -// -// It will return ErrMaxRetries if the retry limit is reached. -func (c *TCPClient) ReadFrom(r io.Reader) (int64, error) { - c.lock.RLock() - defer c.lock.RUnlock() - - disconnected := false - - t := c.retryInterval - for i := 0; i < c.maxRetries; i++ { - if disconnected { - time.Sleep(t) - t *= 2 - c.lock.RUnlock() - if err := c.reconnect(); err != nil { - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNREFUSED { - disconnected = true - c.lock.RLock() - continue - } - return -1, err - default: - return -1, err - } - } else { - disconnected = false - } - c.lock.RLock() - } - n, err := c.TCPConn.ReadFrom(r) - if err == nil { - return n, err - } - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNRESET || - errno(e.Err) == syscall.EPIPE { - disconnected = true - } else { - return n, err - } - default: - if err.Error() == "EOF" { - disconnected = true - } else { - return n, err - } - } - t *= 2 - } - - return -1, ErrMaxRetries -} - -// Write wraps net.TCPConn's Write method with reconnect capabilities. -// -// It will return ErrMaxRetries if the retry limit is reached. 
-func (c *TCPClient) Write(b []byte) (int, error) { - c.lock.RLock() - defer c.lock.RUnlock() - - disconnected := false - - t := c.retryInterval - for i := 0; i < c.maxRetries; i++ { - if disconnected { - time.Sleep(t) - t *= 2 - c.lock.RUnlock() - if err := c.reconnect(); err != nil { - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNREFUSED { - disconnected = true - c.lock.RLock() - continue - } - return -1, err - default: - return -1, err - } - } else { - disconnected = false - } - c.lock.RLock() - } - n, err := c.TCPConn.Write(b) - if err == nil { - return n, err - } - switch e := err.(type) { - case *net.OpError: - if errno(e.Err) == syscall.ECONNRESET || - errno(e.Err) == syscall.EPIPE { - disconnected = true - } else { - return n, err - } - default: - return n, err - } - } - - return -1, ErrMaxRetries -} - -func errno(err error) syscall.Errno { - switch v := err.(type) { - case syscall.Errno: - return v - case *os.SyscallError: - if errno, ok := v.Err.(syscall.Errno); ok { - return errno - } - } - - return syscall.Errno(0x0) -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/.gitignore b/vendor/github.com/TykTechnologies/gojsonschema/.gitignore deleted file mode 100644 index c1e0636fd4d..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.sw[nop] diff --git a/vendor/github.com/TykTechnologies/gojsonschema/.travis.yml b/vendor/github.com/TykTechnologies/gojsonschema/.travis.yml deleted file mode 100644 index 9cc01e8abd2..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go -go: - - 1.3 -before_install: - - go get github.com/sigu-399/gojsonreference - - go get github.com/sigu-399/gojsonpointer - - go get github.com/stretchr/testify/assert diff --git a/vendor/github.com/TykTechnologies/gojsonschema/LICENSE-APACHE-2.0.txt b/vendor/github.com/TykTechnologies/gojsonschema/LICENSE-APACHE-2.0.txt deleted file mode 100644 index 55ede8a42cc..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/LICENSE-APACHE-2.0.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2015 xeipuuv - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/github.com/TykTechnologies/gojsonschema/README.md b/vendor/github.com/TykTechnologies/gojsonschema/README.md deleted file mode 100644 index 127bdd16804..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/README.md +++ /dev/null @@ -1,236 +0,0 @@ -[![Build Status](https://travis-ci.org/xeipuuv/gojsonschema.svg)](https://travis-ci.org/xeipuuv/gojsonschema) - -# gojsonschema - -## Description - -An implementation of JSON Schema, based on IETF's draft v4 - Go language - -References : - -* http://json-schema.org -* http://json-schema.org/latest/json-schema-core.html -* http://json-schema.org/latest/json-schema-validation.html - -## Installation - -``` -go get github.com/xeipuuv/gojsonschema -``` - -Dependencies : -* [github.com/xeipuuv/gojsonpointer](https://github.com/xeipuuv/gojsonpointer) -* [github.com/xeipuuv/gojsonreference](https://github.com/xeipuuv/gojsonreference) -* [github.com/stretchr/testify/assert](https://github.com/stretchr/testify#assert-package) - -## Usage - -### Example - -```go - -package main - -import ( - "fmt" - "github.com/xeipuuv/gojsonschema" -) - -func main() { - - schemaLoader := gojsonschema.NewReferenceLoader("file:///home/me/schema.json") - documentLoader := gojsonschema.NewReferenceLoader("file:///home/me/document.json") - - result, err := gojsonschema.Validate(schemaLoader, documentLoader) - if err != nil { - panic(err.Error()) - } - - if result.Valid() { - fmt.Printf("The document is valid\n") - } else { - fmt.Printf("The document is not valid. see errors :\n") - for _, desc := range result.Errors() { - fmt.Printf("- %s\n", desc) - } - } - -} - - -``` - -#### Loaders - -There are various ways to load your JSON data. -In order to load your schemas and documents, -first declare an appropriate loader : - -* Web / HTTP, using a reference : - -```go -loader := gojsonschema.NewReferenceLoader("http://www.some_host.com/schema.json") -``` - -* Local file, using a reference : - -```go -loader := gojsonschema.NewReferenceLoader("file:///home/me/schema.json") -``` - -References use the URI scheme, the prefix (file://) and a full path to the file are required. - -* JSON strings : - -```go -loader := gojsonschema.NewStringLoader(`{"type": "string"}`) -``` - -* Custom Go types : - -```go -m := map[string]interface{}{"type": "string"} -loader := gojsonschema.NewGoLoader(m) -``` - -And - -```go -type Root struct { - Users []User `json:"users"` -} - -type User struct { - Name string `json:"name"` -} - -... - -data := Root{} -data.Users = append(data.Users, User{"John"}) -data.Users = append(data.Users, User{"Sophia"}) -data.Users = append(data.Users, User{"Bill"}) - -loader := gojsonschema.NewGoLoader(data) -``` - -#### Validation - -Once the loaders are set, validation is easy : - -```go -result, err := gojsonschema.Validate(schemaLoader, documentLoader) -``` - -Alternatively, you might want to load a schema only once and process to multiple validations : - -```go -schema, err := gojsonschema.NewSchema(schemaLoader) -... -result1, err := schema.Validate(documentLoader1) -... -result2, err := schema.Validate(documentLoader2) -... -// etc ... -``` - -To check the result : - -```go - if result.Valid() { - fmt.Printf("The document is valid\n") - } else { - fmt.Printf("The document is not valid. 
see errors :\n") - for _, err := range result.Errors() { - // Err implements the ResultError interface - fmt.Printf("- %s\n", err) - } - } -``` - -## Working with Errors - -The library handles string error codes which you can customize by creating your own gojsonschema.locale and setting it -```go -gojsonschema.Locale = YourCustomLocale{} -``` - -However, each error contains additional contextual information. - -**err.Type()**: *string* Returns the "type" of error that occurred. Note you can also type check. See below - -Note: An error of RequiredType has an err.Type() return value of "required" - - "required": RequiredError - "invalid_type": InvalidTypeError - "number_any_of": NumberAnyOfError - "number_one_of": NumberOneOfError - "number_all_of": NumberAllOfError - "number_not": NumberNotError - "missing_dependency": MissingDependencyError - "internal": InternalError - "enum": EnumError - "array_no_additional_items": ArrayNoAdditionalItemsError - "array_min_items": ArrayMinItemsError - "array_max_items": ArrayMaxItemsError - "unique": ItemsMustBeUniqueError - "array_min_properties": ArrayMinPropertiesError - "array_max_properties": ArrayMaxPropertiesError - "additional_property_not_allowed": AdditionalPropertyNotAllowedError - "invalid_property_pattern": InvalidPropertyPatternError - "string_gte": StringLengthGTEError - "string_lte": StringLengthLTEError - "pattern": DoesNotMatchPatternError - "multiple_of": MultipleOfError - "number_gte": NumberGTEError - "number_gt": NumberGTError - "number_lte": NumberLTEError - "number_lt": NumberLTError - -**err.Value()**: *interface{}* Returns the value given - -**err.Context()**: *gojsonschema.jsonContext* Returns the context. This has a String() method that will print something like this: (root).firstName - -**err.Field()**: *string* Returns the fieldname in the format firstName, or for embedded properties, person.firstName. This returns the same as the String() method on *err.Context()* but removes the (root). prefix. - -**err.Description()**: *string* The error description. This is based on the locale you are using. See the beginning of this section for overwriting the locale with a custom implementation. - -**err.Details()**: *gojsonschema.ErrorDetails* Returns a map[string]interface{} of additional error details specific to the error. For example, GTE errors will have a "min" value, LTE will have a "max" value. See errors.go for a full description of all the error details. Every error always contains a "field" key that holds the value of *err.Field()* - -Note in most cases, the err.Details() will be used to generate replacement strings in your locales, and not used directly. These strings follow the text/template format i.e. -``` -{{.field}} must be greater than or equal to {{.min}} -``` - -## Formats -JSON Schema allows for optional "format" property to validate strings against well-known formats. gojsonschema ships with all of the formats defined in the spec that you can use like this: -````json -{"type": "string", "format": "email"} -```` -Available formats: date-time, hostname, email, ipv4, ipv6, uri. 
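(Editor's note, not part of the removed vendored README: a hedged sketch of exercising one of the built-in format checkers, using only the string loaders and `Validate` call shown earlier in this README; the schema and document strings are invented for illustration.)

```go
package main

import (
	"fmt"
	"log"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// A schema that relies on the built-in "email" format checker.
	schemaLoader := gojsonschema.NewStringLoader(`{"type": "string", "format": "email"}`)
	documentLoader := gojsonschema.NewStringLoader(`"not-an-email"`)

	result, err := gojsonschema.Validate(schemaLoader, documentLoader)
	if err != nil {
		log.Fatal(err)
	}

	if result.Valid() {
		fmt.Println("The document is valid")
		return
	}
	for _, desc := range result.Errors() {
		// Each entry implements the ResultError interface described in the
		// "Working with Errors" section above.
		fmt.Printf("- %s\n", desc)
	}
}
```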
- -For repetitive or more complex formats, you can create custom format checkers and add them to gojsonschema like this: - -```go -// Define the format checker -type RoleFormatChecker struct {} - -// Ensure it meets the gojsonschema.FormatChecker interface -func (f RoleFormatChecker) IsFormat(input string) bool { - return strings.HasPrefix("ROLE_", input) -} - -// Add it to the library -gojsonschema.FormatCheckers.Add("role", RoleFormatChecker{}) -```` - -Now to use in your json schema: -````json -{"type": "string", "format": "role"} -```` - -## Uses - -gojsonschema uses the following test suite : - -https://github.com/json-schema/JSON-Schema-Test-Suite diff --git a/vendor/github.com/TykTechnologies/gojsonschema/errors.go b/vendor/github.com/TykTechnologies/gojsonschema/errors.go deleted file mode 100644 index 1090decb9c4..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/errors.go +++ /dev/null @@ -1,306 +0,0 @@ -package gojsonschema - -import ( - "bytes" - "sync" - "text/template" -) - -var errorTemplates errorTemplate = errorTemplate{template.New("errors-new"), sync.RWMutex{}} - -// template.Template is not thread-safe for writing, so some locking is done -// sync.RWMutex is used for efficiently locking when new templates are created -type errorTemplate struct { - *template.Template - sync.RWMutex -} - -type ( - // RequiredError. ErrorDetails: property string - RequiredError struct { - ResultErrorFields - } - - // InvalidTypeError. ErrorDetails: expected, given - InvalidTypeError struct { - ResultErrorFields - } - - // NumberAnyOfError. ErrorDetails: - - NumberAnyOfError struct { - ResultErrorFields - } - - // NumberOneOfError. ErrorDetails: - - NumberOneOfError struct { - ResultErrorFields - } - - // NumberAllOfError. ErrorDetails: - - NumberAllOfError struct { - ResultErrorFields - } - - // NumberNotError. ErrorDetails: - - NumberNotError struct { - ResultErrorFields - } - - // MissingDependencyError. ErrorDetails: dependency - MissingDependencyError struct { - ResultErrorFields - } - - // InternalError. ErrorDetails: error - InternalError struct { - ResultErrorFields - } - - // EnumError. ErrorDetails: allowed - EnumError struct { - ResultErrorFields - } - - // ArrayNoAdditionalItemsError. ErrorDetails: - - ArrayNoAdditionalItemsError struct { - ResultErrorFields - } - - // ArrayMinItemsError. ErrorDetails: min - ArrayMinItemsError struct { - ResultErrorFields - } - - // ArrayMaxItemsError. ErrorDetails: max - ArrayMaxItemsError struct { - ResultErrorFields - } - - // ItemsMustBeUniqueError. ErrorDetails: type - ItemsMustBeUniqueError struct { - ResultErrorFields - } - - // ArrayMinPropertiesError. ErrorDetails: min - ArrayMinPropertiesError struct { - ResultErrorFields - } - - // ArrayMaxPropertiesError. ErrorDetails: max - ArrayMaxPropertiesError struct { - ResultErrorFields - } - - // AdditionalPropertyNotAllowedError. ErrorDetails: property - AdditionalPropertyNotAllowedError struct { - ResultErrorFields - } - - // InvalidPropertyPatternError. ErrorDetails: property, pattern - InvalidPropertyPatternError struct { - ResultErrorFields - } - - // StringLengthGTEError. ErrorDetails: min - StringLengthGTEError struct { - ResultErrorFields - } - - // StringLengthLTEError. ErrorDetails: max - StringLengthLTEError struct { - ResultErrorFields - } - - // StringNumericGTEError. ErrorDetails: min_numeric - StringNumericGTEError struct { - ResultErrorFields - } - - // StringSpecialGTEError. 
ErrorDetails: min_special - StringSpecialGTEError struct { - ResultErrorFields - } - - // StringMultiCaseError. ErrorDetails: multi_case - StringMultiCaseError struct { - ResultErrorFields - } - - // StringSequentialError. ErrorDetails: disable_sequential - StringSequentialError struct { - ResultErrorFields - } - - // DoesNotMatchPatternError. ErrorDetails: pattern - DoesNotMatchPatternError struct { - ResultErrorFields - } - - // DoesNotMatchFormatError. ErrorDetails: format - DoesNotMatchFormatError struct { - ResultErrorFields - } - - // MultipleOfError. ErrorDetails: multiple - MultipleOfError struct { - ResultErrorFields - } - - // NumberGTEError. ErrorDetails: min - NumberGTEError struct { - ResultErrorFields - } - - // NumberGTError. ErrorDetails: min - NumberGTError struct { - ResultErrorFields - } - - // NumberLTEError. ErrorDetails: max - NumberLTEError struct { - ResultErrorFields - } - - // NumberLTError. ErrorDetails: max - NumberLTError struct { - ResultErrorFields - } -) - -// newError takes a ResultError type and sets the type, context, description, details, value, and field -func newError(err ResultError, context *jsonContext, value interface{}, locale locale, details ErrorDetails) { - var t string - var d string - switch err.(type) { - case *RequiredError: - t = "required" - d = locale.Required() - case *InvalidTypeError: - t = "invalid_type" - d = locale.InvalidType() - case *NumberAnyOfError: - t = "number_any_of" - d = locale.NumberAnyOf() - case *NumberOneOfError: - t = "number_one_of" - d = locale.NumberOneOf() - case *NumberAllOfError: - t = "number_all_of" - d = locale.NumberAllOf() - case *NumberNotError: - t = "number_not" - d = locale.NumberNot() - case *MissingDependencyError: - t = "missing_dependency" - d = locale.MissingDependency() - case *InternalError: - t = "internal" - d = locale.Internal() - case *EnumError: - t = "enum" - d = locale.Enum() - case *ArrayNoAdditionalItemsError: - t = "array_no_additional_items" - d = locale.ArrayNoAdditionalItems() - case *ArrayMinItemsError: - t = "array_min_items" - d = locale.ArrayMinItems() - case *ArrayMaxItemsError: - t = "array_max_items" - d = locale.ArrayMaxItems() - case *ItemsMustBeUniqueError: - t = "unique" - d = locale.Unique() - case *ArrayMinPropertiesError: - t = "array_min_properties" - d = locale.ArrayMinProperties() - case *ArrayMaxPropertiesError: - t = "array_max_properties" - d = locale.ArrayMaxProperties() - case *AdditionalPropertyNotAllowedError: - t = "additional_property_not_allowed" - d = locale.AdditionalPropertyNotAllowed() - case *InvalidPropertyPatternError: - t = "invalid_property_pattern" - d = locale.InvalidPropertyPattern() - case *StringLengthGTEError: - t = "string_gte" - d = locale.StringGTE() - case *StringLengthLTEError: - t = "string_lte" - d = locale.StringLTE() - case *StringNumericGTEError: - t = "numeric_gte" - d = locale.NumericGTE() - case *StringSpecialGTEError: - t = "special_gte" - d = locale.SpecialGTE() - case *StringMultiCaseError: - t = "multi_case" - d = locale.MultiCase() - case *StringSequentialError: - t = "disable_sequential" - d = locale.Sequential() - case *DoesNotMatchPatternError: - t = "pattern" - d = locale.DoesNotMatchPattern() - case *DoesNotMatchFormatError: - t = "format" - d = locale.DoesNotMatchFormat() - case *MultipleOfError: - t = "multiple_of" - d = locale.MultipleOf() - case *NumberGTEError: - t = "number_gte" - d = locale.NumberGTE() - case *NumberGTError: - t = "number_gt" - d = locale.NumberGT() - case *NumberLTEError: - t = "number_lte" - 
d = locale.NumberLTE() - case *NumberLTError: - t = "number_lt" - d = locale.NumberLT() - } - - err.SetType(t) - err.SetContext(context) - err.SetValue(value) - err.SetDetails(details) - details["field"] = err.Field() - err.SetDescription(formatErrorDescription(d, details)) -} - -// formatErrorDescription takes a string in the default text/template -// format and converts it to a string with replacements. The fields come -// from the ErrorDetails struct and vary for each type of error. -func formatErrorDescription(s string, details ErrorDetails) string { - - var tpl *template.Template - var descrAsBuffer bytes.Buffer - var err error - - errorTemplates.RLock() - tpl = errorTemplates.Lookup(s) - errorTemplates.RUnlock() - - if tpl == nil { - errorTemplates.Lock() - tpl = errorTemplates.New(s) - - tpl, err = tpl.Parse(s) - errorTemplates.Unlock() - - if err != nil { - return err.Error() - } - } - - err = tpl.Execute(&descrAsBuffer, details) - if err != nil { - return err.Error() - } - - return descrAsBuffer.String() -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/format_checkers.go b/vendor/github.com/TykTechnologies/gojsonschema/format_checkers.go deleted file mode 100644 index c7214b0455b..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/format_checkers.go +++ /dev/null @@ -1,194 +0,0 @@ -package gojsonschema - -import ( - "net" - "net/url" - "reflect" - "regexp" - "strings" - "time" -) - -type ( - // FormatChecker is the interface all formatters added to FormatCheckerChain must implement - FormatChecker interface { - IsFormat(input string) bool - } - - // FormatCheckerChain holds the formatters - FormatCheckerChain struct { - formatters map[string]FormatChecker - } - - // EmailFormatter verifies email address formats - EmailFormatChecker struct{} - - // IPV4FormatChecker verifies IP addresses in the ipv4 format - IPV4FormatChecker struct{} - - // IPV6FormatChecker verifies IP addresses in the ipv6 format - IPV6FormatChecker struct{} - - // DateTimeFormatChecker verifies date/time formats per RFC3339 5.6 - // - // Valid formats: - // Partial Time: HH:MM:SS - // Full Date: YYYY-MM-DD - // Full Time: HH:MM:SSZ-07:00 - // Date Time: YYYY-MM-DDTHH:MM:SSZ-0700 - // - // Where - // YYYY = 4DIGIT year - // MM = 2DIGIT month ; 01-12 - // DD = 2DIGIT day-month ; 01-28, 01-29, 01-30, 01-31 based on month/year - // HH = 2DIGIT hour ; 00-23 - // MM = 2DIGIT ; 00-59 - // SS = 2DIGIT ; 00-58, 00-60 based on leap second rules - // T = Literal - // Z = Literal - // - // Note: Nanoseconds are also suported in all formats - // - // http://tools.ietf.org/html/rfc3339#section-5.6 - DateTimeFormatChecker struct{} - - // URIFormatCheckers validates a URI with a valid Scheme per RFC3986 - URIFormatChecker struct{} - - // HostnameFormatChecker validates a hostname is in the correct format - HostnameFormatChecker struct{} - - // UUIDFormatChecker validates a UUID is in the correct format - UUIDFormatChecker struct{} - - // RegexFormatChecker validates a regex is in the correct format - RegexFormatChecker struct{} -) - -var ( - // Formatters holds the valid formatters, and is a public variable - // so library users can add custom formatters - FormatCheckers = FormatCheckerChain{ - formatters: map[string]FormatChecker{ - "date-time": DateTimeFormatChecker{}, - "hostname": HostnameFormatChecker{}, - "email": EmailFormatChecker{}, - "ipv4": IPV4FormatChecker{}, - "ipv6": IPV6FormatChecker{}, - "uri": URIFormatChecker{}, - "uuid": UUIDFormatChecker{}, - "regex": RegexFormatChecker{}, - }, 
- } - - // Regex credit: https://github.com/asaskevich/govalidator - rxEmail = regexp.MustCompile("^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$") - - // Regex credit: https://www.socketloop.com/tutorials/golang-validate-hostname - rxHostname = regexp.MustCompile(`^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$`) - - rxUUID = regexp.MustCompile("^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$") -) - -// Add adds a FormatChecker to the FormatCheckerChain -// The name used will be the value used for the format key in your json schema -func (c *FormatCheckerChain) Add(name string, f FormatChecker) *FormatCheckerChain { - c.formatters[name] = f - - return c -} - -// Remove deletes a FormatChecker from the FormatCheckerChain (if it exists) -func (c *FormatCheckerChain) Remove(name string) *FormatCheckerChain { - delete(c.formatters, name) - - return c -} - -// Has checks to see if the FormatCheckerChain holds a FormatChecker with the given name -func (c *FormatCheckerChain) Has(name string) bool { - _, ok := c.formatters[name] - - return ok -} - -// IsFormat will check an input against a FormatChecker with the given name -// to see if it is the correct format -func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool { - f, ok := c.formatters[name] - - if !ok { - return false - } - - if !isKind(input, reflect.String) { - return false - } - - inputString := input.(string) - - return f.IsFormat(inputString) -} - -func (f EmailFormatChecker) IsFormat(input string) bool { - return rxEmail.MatchString(input) -} - -// Credit: https://github.com/asaskevich/govalidator -func (f IPV4FormatChecker) IsFormat(input string) bool { - ip := net.ParseIP(input) - return ip != nil && strings.Contains(input, ".") -} - -// Credit: https://github.com/asaskevich/govalidator -func (f IPV6FormatChecker) IsFormat(input string) bool { - ip := net.ParseIP(input) - return ip != nil && strings.Contains(input, ":") -} - -func (f DateTimeFormatChecker) IsFormat(input string) bool { - formats := []string{ - "15:04:05", - "15:04:05Z07:00", - "2006-01-02", - time.RFC3339, - time.RFC3339Nano, - } - - for _, format := range formats { - if _, err := time.Parse(format, input); err == nil { - return true - } - } - - return false -} - -func (f URIFormatChecker) IsFormat(input string) bool 
{ - u, err := url.Parse(input) - if err != nil || u.Scheme == "" { - return false - } - - return true -} - -func (f HostnameFormatChecker) IsFormat(input string) bool { - return rxHostname.MatchString(input) && len(input) < 256 -} - -func (f UUIDFormatChecker) IsFormat(input string) bool { - return rxUUID.MatchString(input) -} - -// IsFormat implements FormatChecker interface. -func (f RegexFormatChecker) IsFormat(input string) bool { - if input == "" { - return true - } - _, err := regexp.Compile(input) - if err != nil { - return false - } - return true -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/glide.yaml b/vendor/github.com/TykTechnologies/gojsonschema/glide.yaml deleted file mode 100644 index 7aef8c0951d..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/glide.yaml +++ /dev/null @@ -1,12 +0,0 @@ -package: github.com/xeipuuv/gojsonschema -license: Apache 2.0 -import: -- package: github.com/xeipuuv/gojsonschema - -- package: github.com/xeipuuv/gojsonpointer - -- package: github.com/xeipuuv/gojsonreference - -- package: github.com/stretchr/testify/assert - version: ^1.1.3 - diff --git a/vendor/github.com/TykTechnologies/gojsonschema/internalLog.go b/vendor/github.com/TykTechnologies/gojsonschema/internalLog.go deleted file mode 100644 index 4ef7a8d03e7..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/internalLog.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Very simple log wrapper. -// Used for debugging/testing purposes. -// -// created 01-01-2015 - -package gojsonschema - -import ( - "log" -) - -const internalLogEnabled = false - -func internalLog(format string, v ...interface{}) { - log.Printf(format, v...) -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/jsonContext.go b/vendor/github.com/TykTechnologies/gojsonschema/jsonContext.go deleted file mode 100644 index fcc8d9d6f1f..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/jsonContext.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2013 MongoDB, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// author tolsen -// author-github https://github.com/tolsen -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Implements a persistent (immutable w/ shared structure) singly-linked list of strings for the purpose of storing a json context -// -// created 04-09-2013 - -package gojsonschema - -import "bytes" - -// jsonContext implements a persistent linked-list of strings -type jsonContext struct { - head string - tail *jsonContext -} - -func newJsonContext(head string, tail *jsonContext) *jsonContext { - return &jsonContext{head, tail} -} - -// String displays the context in reverse. -// This plays well with the data structure's persistent nature with -// Cons and a json document's tree structure. -func (c *jsonContext) String(del ...string) string { - byteArr := make([]byte, 0, c.stringLen()) - buf := bytes.NewBuffer(byteArr) - c.writeStringToBuffer(buf, del) - - return buf.String() -} - -func (c *jsonContext) stringLen() int { - length := 0 - if c.tail != nil { - length = c.tail.stringLen() + 1 // add 1 for "." - } - - length += len(c.head) - return length -} - -func (c *jsonContext) writeStringToBuffer(buf *bytes.Buffer, del []string) { - if c.tail != nil { - c.tail.writeStringToBuffer(buf, del) - - if len(del) > 0 { - buf.WriteString(del[0]) - } else { - buf.WriteString(".") - } - } - - buf.WriteString(c.head) -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/jsonLoader.go b/vendor/github.com/TykTechnologies/gojsonschema/jsonLoader.go deleted file mode 100644 index cab6ed05bdc..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/jsonLoader.go +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Different strategies to load JSON files. -// Includes References (file and HTTP), JSON strings and Go types. 
-// -// created 01-02-2015 - -package gojsonschema - -import ( - "bytes" - "encoding/json" - "errors" - "io" - "io/ioutil" - "net/http" - "os" - "path/filepath" - "runtime" - "strings" - - "github.com/xeipuuv/gojsonreference" -) - -var osFS = osFileSystem(os.Open) - -// JSON loader interface - -type JSONLoader interface { - JsonSource() interface{} - LoadJSON() (interface{}, error) - JsonReference() (gojsonreference.JsonReference, error) - LoaderFactory() JSONLoaderFactory -} - -type JSONLoaderFactory interface { - New(source string) JSONLoader -} - -type DefaultJSONLoaderFactory struct { -} - -type FileSystemJSONLoaderFactory struct { - fs http.FileSystem -} - -func (d DefaultJSONLoaderFactory) New(source string) JSONLoader { - return &jsonReferenceLoader{ - fs: osFS, - source: source, - } -} - -func (f FileSystemJSONLoaderFactory) New(source string) JSONLoader { - return &jsonReferenceLoader{ - fs: f.fs, - source: source, - } -} - -// osFileSystem is a functional wrapper for os.Open that implements http.FileSystem. -type osFileSystem func(string) (*os.File, error) - -func (o osFileSystem) Open(name string) (http.File, error) { - return o(name) -} - -// JSON Reference loader -// references are used to load JSONs from files and HTTP - -type jsonReferenceLoader struct { - fs http.FileSystem - source string -} - -func (l *jsonReferenceLoader) JsonSource() interface{} { - return l.source -} - -func (l *jsonReferenceLoader) JsonReference() (gojsonreference.JsonReference, error) { - return gojsonreference.NewJsonReference(l.JsonSource().(string)) -} - -func (l *jsonReferenceLoader) LoaderFactory() JSONLoaderFactory { - return &FileSystemJSONLoaderFactory{ - fs: l.fs, - } -} - -// NewReferenceLoader returns a JSON reference loader using the given source and the local OS file system. -func NewReferenceLoader(source string) *jsonReferenceLoader { - return &jsonReferenceLoader{ - fs: osFS, - source: source, - } -} - -// NewReferenceLoaderFileSystem returns a JSON reference loader using the given source and file system. 
-func NewReferenceLoaderFileSystem(source string, fs http.FileSystem) *jsonReferenceLoader { - return &jsonReferenceLoader{ - fs: fs, - source: source, - } -} - -func (l *jsonReferenceLoader) LoadJSON() (interface{}, error) { - - var err error - - reference, err := gojsonreference.NewJsonReference(l.JsonSource().(string)) - if err != nil { - return nil, err - } - - refToUrl := reference - refToUrl.GetUrl().Fragment = "" - - var document interface{} - - if reference.HasFileScheme { - - filename := strings.Replace(refToUrl.GetUrl().Path, "file://", "", -1) - if runtime.GOOS == "windows" { - // on Windows, a file URL may have an extra leading slash, use slashes - // instead of backslashes, and have spaces escaped - if strings.HasPrefix(filename, "/") { - filename = filename[1:] - } - filename = filepath.FromSlash(filename) - } - - document, err = l.loadFromFile(filename) - if err != nil { - return nil, err - } - - } else { - - document, err = l.loadFromHTTP(refToUrl.String()) - if err != nil { - return nil, err - } - - } - - return document, nil - -} - -func (l *jsonReferenceLoader) loadFromHTTP(address string) (interface{}, error) { - - resp, err := http.Get(address) - if err != nil { - return nil, err - } - - // must return HTTP Status 200 OK - if resp.StatusCode != http.StatusOK { - return nil, errors.New(formatErrorDescription(Locale.httpBadStatus(), ErrorDetails{"status": resp.Status})) - } - - bodyBuff, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - return decodeJsonUsingNumber(bytes.NewReader(bodyBuff)) - -} - -func (l *jsonReferenceLoader) loadFromFile(path string) (interface{}, error) { - f, err := l.fs.Open(path) - if err != nil { - return nil, err - } - defer f.Close() - - bodyBuff, err := ioutil.ReadAll(f) - if err != nil { - return nil, err - } - - return decodeJsonUsingNumber(bytes.NewReader(bodyBuff)) - -} - -// JSON string loader - -type jsonStringLoader struct { - source string -} - -func (l *jsonStringLoader) JsonSource() interface{} { - return l.source -} - -func (l *jsonStringLoader) JsonReference() (gojsonreference.JsonReference, error) { - return gojsonreference.NewJsonReference("#") -} - -func (l *jsonStringLoader) LoaderFactory() JSONLoaderFactory { - return &DefaultJSONLoaderFactory{} -} - -func NewStringLoader(source string) *jsonStringLoader { - return &jsonStringLoader{source: source} -} - -func (l *jsonStringLoader) LoadJSON() (interface{}, error) { - - return decodeJsonUsingNumber(strings.NewReader(l.JsonSource().(string))) - -} - -// JSON bytes loader - -type jsonBytesLoader struct { - source []byte -} - -func (l *jsonBytesLoader) JsonSource() interface{} { - return l.source -} - -func (l *jsonBytesLoader) JsonReference() (gojsonreference.JsonReference, error) { - return gojsonreference.NewJsonReference("#") -} - -func (l *jsonBytesLoader) LoaderFactory() JSONLoaderFactory { - return &DefaultJSONLoaderFactory{} -} - -func NewBytesLoader(source []byte) *jsonBytesLoader { - return &jsonBytesLoader{source: source} -} - -func (l *jsonBytesLoader) LoadJSON() (interface{}, error) { - return decodeJsonUsingNumber(bytes.NewReader(l.JsonSource().([]byte))) -} - -// JSON Go (types) loader -// used to load JSONs from the code as maps, interface{}, structs ... 
- -type jsonGoLoader struct { - source interface{} -} - -func (l *jsonGoLoader) JsonSource() interface{} { - return l.source -} - -func (l *jsonGoLoader) JsonReference() (gojsonreference.JsonReference, error) { - return gojsonreference.NewJsonReference("#") -} - -func (l *jsonGoLoader) LoaderFactory() JSONLoaderFactory { - return &DefaultJSONLoaderFactory{} -} - -func NewGoLoader(source interface{}) *jsonGoLoader { - return &jsonGoLoader{source: source} -} - -func (l *jsonGoLoader) LoadJSON() (interface{}, error) { - - // convert it to a compliant JSON first to avoid types "mismatches" - - jsonBytes, err := json.Marshal(l.JsonSource()) - if err != nil { - return nil, err - } - - return decodeJsonUsingNumber(bytes.NewReader(jsonBytes)) - -} - -type jsonIOLoader struct { - buf *bytes.Buffer -} - -func NewReaderLoader(source io.Reader) (*jsonIOLoader, io.Reader) { - buf := &bytes.Buffer{} - return &jsonIOLoader{buf: buf}, io.TeeReader(source, buf) -} - -func NewWriterLoader(source io.Writer) (*jsonIOLoader, io.Writer) { - buf := &bytes.Buffer{} - return &jsonIOLoader{buf: buf}, io.MultiWriter(source, buf) -} - -func (l *jsonIOLoader) JsonSource() interface{} { - return l.buf.String() -} - -func (l *jsonIOLoader) LoadJSON() (interface{}, error) { - return decodeJsonUsingNumber(l.buf) -} - -func (l *jsonIOLoader) JsonReference() (gojsonreference.JsonReference, error) { - return gojsonreference.NewJsonReference("#") -} - -func (l *jsonIOLoader) LoaderFactory() JSONLoaderFactory { - return &DefaultJSONLoaderFactory{} -} - -func decodeJsonUsingNumber(r io.Reader) (interface{}, error) { - - var document interface{} - - decoder := json.NewDecoder(r) - decoder.UseNumber() - - err := decoder.Decode(&document) - if err != nil { - return nil, err - } - - return document, nil - -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/locales.go b/vendor/github.com/TykTechnologies/gojsonschema/locales.go deleted file mode 100644 index 1c3171d1090..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/locales.go +++ /dev/null @@ -1,300 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Contains const string and messages. 
-// -// created 01-01-2015 - -package gojsonschema - -type ( - // locale is an interface for definining custom error strings - locale interface { - Required() string - InvalidType() string - NumberAnyOf() string - NumberOneOf() string - NumberAllOf() string - NumberNot() string - MissingDependency() string - Internal() string - Enum() string - ArrayNotEnoughItems() string - ArrayNoAdditionalItems() string - ArrayMinItems() string - ArrayMaxItems() string - Unique() string - ArrayMinProperties() string - ArrayMaxProperties() string - AdditionalPropertyNotAllowed() string - InvalidPropertyPattern() string - StringGTE() string - StringLTE() string - NumericGTE() string - SpecialGTE() string - MultiCase() string - Sequential() string - DoesNotMatchPattern() string - DoesNotMatchFormat() string - MultipleOf() string - NumberGTE() string - NumberGT() string - NumberLTE() string - NumberLT() string - - // Schema validations - RegexPattern() string - GreaterThanZero() string - MustBeOfA() string - MustBeOfAn() string - CannotBeUsedWithout() string - CannotBeGT() string - MustBeOfType() string - MustBeValidRegex() string - MustBeValidFormat() string - MustBeGTEZero() string - KeyCannotBeGreaterThan() string - KeyItemsMustBeOfType() string - KeyItemsMustBeUnique() string - ReferenceMustBeCanonical() string - NotAValidType() string - Duplicated() string - httpBadStatus() string - - // ErrorFormat - ErrorFormat() string - } - - // DefaultLocale is the default locale for this package - DefaultLocale struct{} -) - -func (l DefaultLocale) Required() string { - return `{{.property}} is required` -} - -func (l DefaultLocale) InvalidType() string { - return `Invalid type. Expected: {{.expected}}, given: {{.given}}` -} - -func (l DefaultLocale) NumberAnyOf() string { - return `Must validate at least one schema (anyOf)` -} - -func (l DefaultLocale) NumberOneOf() string { - return `Must validate one and only one schema (oneOf)` -} - -func (l DefaultLocale) NumberAllOf() string { - return `Must validate all the schemas (allOf)` -} - -func (l DefaultLocale) NumberNot() string { - return `Must not validate the schema (not)` -} - -func (l DefaultLocale) MissingDependency() string { - return `Has a dependency on {{.dependency}}` -} - -func (l DefaultLocale) Internal() string { - return `Internal Error {{.error}}` -} - -func (l DefaultLocale) Enum() string { - return `{{.field}} must be one of the following: {{.allowed}}` -} - -func (l DefaultLocale) ArrayNoAdditionalItems() string { - return `No additional items allowed on array` -} - -func (l DefaultLocale) ArrayNotEnoughItems() string { - return `Not enough items on array to match positional list of schema` -} - -func (l DefaultLocale) ArrayMinItems() string { - return `Array must have at least {{.min}} items` -} - -func (l DefaultLocale) ArrayMaxItems() string { - return `Array must have at most {{.max}} items` -} - -func (l DefaultLocale) Unique() string { - return `{{.type}} items must be unique` -} - -func (l DefaultLocale) ArrayMinProperties() string { - return `Must have at least {{.min}} properties` -} - -func (l DefaultLocale) ArrayMaxProperties() string { - return `Must have at most {{.max}} properties` -} - -func (l DefaultLocale) AdditionalPropertyNotAllowed() string { - return `Additional property {{.property}} is not allowed` -} - -func (l DefaultLocale) InvalidPropertyPattern() string { - return `Property "{{.property}}" does not match pattern {{.pattern}}` -} - -func (l DefaultLocale) StringGTE() string { - return `String length must be greater 
than or equal to {{.min}}` -} - -func (l DefaultLocale) StringLTE() string { - return `String length must be less than or equal to {{.max}}` -} - -func (l DefaultLocale) NumericGTE() string { - return `String must include at least {{.min_numeric}} numeric characters` -} - -func (l DefaultLocale) SpecialGTE() string { - return `String must include at least {{.min_special}} special characters (like '@', '$', '*' etc.)` -} - -func (l DefaultLocale) MultiCase() string { - return `String must include both lower and upper case characters` -} - -func (l DefaultLocale) Sequential() string { - return `String must not include sequential chars: {{.sequential_chars}}` -} - -func (l DefaultLocale) DoesNotMatchPattern() string { - return `Does not match pattern '{{.pattern}}'` -} - -func (l DefaultLocale) DoesNotMatchFormat() string { - return `Does not match format '{{.format}}'` -} - -func (l DefaultLocale) MultipleOf() string { - return `Must be a multiple of {{.multiple}}` -} - -func (l DefaultLocale) NumberGTE() string { - return `Must be greater than or equal to {{.min}}` -} - -func (l DefaultLocale) NumberGT() string { - return `Must be greater than {{.min}}` -} - -func (l DefaultLocale) NumberLTE() string { - return `Must be less than or equal to {{.max}}` -} - -func (l DefaultLocale) NumberLT() string { - return `Must be less than {{.max}}` -} - -// Schema validators -func (l DefaultLocale) RegexPattern() string { - return `Invalid regex pattern '{{.pattern}}'` -} - -func (l DefaultLocale) GreaterThanZero() string { - return `{{.number}} must be strictly greater than 0` -} - -func (l DefaultLocale) MustBeOfA() string { - return `{{.x}} must be of a {{.y}}` -} - -func (l DefaultLocale) MustBeOfAn() string { - return `{{.x}} must be of an {{.y}}` -} - -func (l DefaultLocale) CannotBeUsedWithout() string { - return `{{.x}} cannot be used without {{.y}}` -} - -func (l DefaultLocale) CannotBeGT() string { - return `{{.x}} cannot be greater than {{.y}}` -} - -func (l DefaultLocale) MustBeOfType() string { - return `{{.key}} must be of type {{.type}}` -} - -func (l DefaultLocale) MustBeValidRegex() string { - return `{{.key}} must be a valid regex` -} - -func (l DefaultLocale) MustBeValidFormat() string { - return `{{.key}} must be a valid format {{.given}}` -} - -func (l DefaultLocale) MustBeGTEZero() string { - return `{{.key}} must be greater than or equal to 0` -} - -func (l DefaultLocale) KeyCannotBeGreaterThan() string { - return `{{.key}} cannot be greater than {{.y}}` -} - -func (l DefaultLocale) KeyItemsMustBeOfType() string { - return `{{.key}} items must be {{.type}}` -} - -func (l DefaultLocale) KeyItemsMustBeUnique() string { - return `{{.key}} items must be unique` -} - -func (l DefaultLocale) ReferenceMustBeCanonical() string { - return `Reference {{.reference}} must be canonical` -} - -func (l DefaultLocale) NotAValidType() string { - return `{{.type}} is not a valid type -- ` -} - -func (l DefaultLocale) Duplicated() string { - return `{{.type}} type is duplicated` -} - -func (l DefaultLocale) httpBadStatus() string { - return `Could not read schema from HTTP, response status is {{.status}}` -} - -// Replacement options: field, description, context, value -func (l DefaultLocale) ErrorFormat() string { - return `{{.field}}: {{.description}}` -} - -const ( - STRING_NUMBER = "number" - STRING_ARRAY_OF_STRINGS = "array of strings" - STRING_ARRAY_OF_SCHEMAS = "array of schemas" - STRING_SCHEMA = "schema" - STRING_SCHEMA_OR_ARRAY_OF_STRINGS = "schema or array of strings" - STRING_PROPERTIES 
= "properties" - STRING_DEPENDENCY = "dependency" - STRING_PROPERTY = "property" - STRING_UNDEFINED = "undefined" - STRING_CONTEXT_ROOT = "(root)" - STRING_ROOT_SCHEMA_PROPERTY = "(root)" -) diff --git a/vendor/github.com/TykTechnologies/gojsonschema/result.go b/vendor/github.com/TykTechnologies/gojsonschema/result.go deleted file mode 100644 index 6ad56ae8656..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/result.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Result and ResultError implementations. -// -// created 01-01-2015 - -package gojsonschema - -import ( - "fmt" - "strings" -) - -type ( - // ErrorDetails is a map of details specific to each error. - // While the values will vary, every error will contain a "field" value - ErrorDetails map[string]interface{} - - // ResultError is the interface that library errors must implement - ResultError interface { - Field() string - SetType(string) - Type() string - SetContext(*jsonContext) - Context() *jsonContext - SetDescription(string) - Description() string - SetValue(interface{}) - Value() interface{} - SetDetails(ErrorDetails) - Details() ErrorDetails - String() string - } - - // ResultErrorFields holds the fields for each ResultError implementation. - // ResultErrorFields implements the ResultError interface, so custom errors - // can be defined by just embedding this type - ResultErrorFields struct { - errorType string // A string with the type of error (i.e. invalid_type) - context *jsonContext // Tree like notation of the part that failed the validation. ex (root).a.b ... - description string // A human readable error message - value interface{} // Value given by the JSON file that is the source of the error - details ErrorDetails - } - - Result struct { - errors []ResultError - // Scores how well the validation matched. Useful in generating - // better error messages for anyOf and oneOf. - score int - } -) - -// Field outputs the field name without the root context -// i.e. 
firstName or person.firstName instead of (root).firstName or (root).person.firstName -func (v *ResultErrorFields) Field() string { - if p, ok := v.Details()["property"]; ok { - if str, isString := p.(string); isString { - return str - } - } - - return strings.TrimPrefix(v.context.String(), STRING_ROOT_SCHEMA_PROPERTY+".") -} - -func (v *ResultErrorFields) SetType(errorType string) { - v.errorType = errorType -} - -func (v *ResultErrorFields) Type() string { - return v.errorType -} - -func (v *ResultErrorFields) SetContext(context *jsonContext) { - v.context = context -} - -func (v *ResultErrorFields) Context() *jsonContext { - return v.context -} - -func (v *ResultErrorFields) SetDescription(description string) { - v.description = description -} - -func (v *ResultErrorFields) Description() string { - return v.description -} - -func (v *ResultErrorFields) SetValue(value interface{}) { - v.value = value -} - -func (v *ResultErrorFields) Value() interface{} { - return v.value -} - -func (v *ResultErrorFields) SetDetails(details ErrorDetails) { - v.details = details -} - -func (v *ResultErrorFields) Details() ErrorDetails { - return v.details -} - -func (v ResultErrorFields) String() string { - // as a fallback, the value is displayed go style - valueString := fmt.Sprintf("%v", v.value) - - // marshal the go value value to json - if v.value == nil { - valueString = TYPE_NULL - } else { - if vs, err := marshalToJsonString(v.value); err == nil { - if vs == nil { - valueString = TYPE_NULL - } else { - valueString = *vs - } - } - } - - return formatErrorDescription(Locale.ErrorFormat(), ErrorDetails{ - "context": v.context.String(), - "description": v.description, - "value": valueString, - "field": v.Field(), - }) -} - -func (v *Result) Valid() bool { - return len(v.errors) == 0 -} - -func (v *Result) Errors() []ResultError { - return v.errors -} - -func (v *Result) addError(err ResultError, context *jsonContext, value interface{}, details ErrorDetails) { - newError(err, context, value, Locale, details) - v.errors = append(v.errors, err) - v.score -= 2 // results in a net -1 when added to the +1 we get at the end of the validation function -} - -// Used to copy errors from a sub-schema to the main one -func (v *Result) mergeErrors(otherResult *Result) { - v.errors = append(v.errors, otherResult.Errors()...) - v.score += otherResult.score -} - -func (v *Result) incrementScore() { - v.score++ -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/schema.go b/vendor/github.com/TykTechnologies/gojsonschema/schema.go deleted file mode 100644 index f71212917c8..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/schema.go +++ /dev/null @@ -1,986 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
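Since result.go above defines the Result and ResultError surface that callers walk, here is a minimal usage sketch (schema and document literals are illustrative; the API names are taken from the deleted file):

package main

import (
	"fmt"

	"github.com/TykTechnologies/gojsonschema"
)

func main() {
	schemaLoader := gojsonschema.NewStringLoader(`{
		"type": "object",
		"properties": {"name": {"type": "string"}},
		"required": ["name"]
	}`)
	documentLoader := gojsonschema.NewStringLoader(`{"name": 42}`)

	result, err := gojsonschema.Validate(schemaLoader, documentLoader)
	if err != nil {
		panic(err) // the schema itself failed to load or parse
	}

	if result.Valid() {
		fmt.Println("document is valid")
		return
	}
	for _, e := range result.Errors() {
		// Field() strips the (root). prefix; Description() holds the
		// human-readable message; Type() is e.g. "invalid_type".
		fmt.Printf("%s: %s (%s)\n", e.Field(), e.Description(), e.Type())
	}
}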
- -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Defines Schema, the main entry to every subSchema. -// Contains the parsing logic and error checking. -// -// created 26-02-2013 - -package gojsonschema - -import ( - // "encoding/json" - "errors" - "reflect" - "regexp" - - "github.com/xeipuuv/gojsonreference" -) - -var ( - // Locale is the default locale to use - // Library users can overwrite with their own implementation - Locale locale = DefaultLocale{} -) - -func NewSchema(l JSONLoader) (*Schema, error) { - ref, err := l.JsonReference() - if err != nil { - return nil, err - } - - d := Schema{} - d.pool = newSchemaPool(l.LoaderFactory()) - d.documentReference = ref - d.referencePool = newSchemaReferencePool() - - var doc interface{} - if ref.String() != "" { - // Get document from schema pool - spd, err := d.pool.GetDocument(d.documentReference) - if err != nil { - return nil, err - } - doc = spd.Document - } else { - // Load JSON directly - doc, err = l.LoadJSON() - if err != nil { - return nil, err - } - d.pool.SetStandaloneDocument(doc) - } - - err = d.parse(doc) - if err != nil { - return nil, err - } - - return &d, nil -} - -type Schema struct { - documentReference gojsonreference.JsonReference - rootSchema *subSchema - pool *schemaPool - referencePool *schemaReferencePool -} - -func (d *Schema) parse(document interface{}) error { - d.rootSchema = &subSchema{property: STRING_ROOT_SCHEMA_PROPERTY} - return d.parseSchema(document, d.rootSchema) -} - -func (d *Schema) SetRootSchemaName(name string) { - d.rootSchema.property = name -} - -// Parses a subSchema -// -// Pretty long function ( sorry :) )... 
but pretty straight forward, repetitive and boring -// Not much magic involved here, most of the job is to validate the key names and their values, -// then the values are copied into subSchema struct -// -func (d *Schema) parseSchema(documentNode interface{}, currentSchema *subSchema) error { - - if !isKind(documentNode, reflect.Map) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_OBJECT, - "given": STRING_SCHEMA, - }, - )) - } - - m := documentNode.(map[string]interface{}) - - if currentSchema == d.rootSchema { - currentSchema.ref = &d.documentReference - } - - // $subSchema - if existsMapKey(m, KEY_SCHEMA) { - if !isKind(m[KEY_SCHEMA], reflect.String) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING, - "given": KEY_SCHEMA, - }, - )) - } - schemaRef := m[KEY_SCHEMA].(string) - schemaReference, err := gojsonreference.NewJsonReference(schemaRef) - currentSchema.subSchema = &schemaReference - if err != nil { - return err - } - } - - // $ref - if existsMapKey(m, KEY_REF) && !isKind(m[KEY_REF], reflect.String) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING, - "given": KEY_REF, - }, - )) - } - if k, ok := m[KEY_REF].(string); ok { - - jsonReference, err := gojsonreference.NewJsonReference(k) - if err != nil { - return err - } - - if jsonReference.HasFullUrl { - currentSchema.ref = &jsonReference - } else { - inheritedReference, err := currentSchema.ref.Inherits(jsonReference) - if err != nil { - return err - } - - currentSchema.ref = inheritedReference - } - - if sch, ok := d.referencePool.Get(currentSchema.ref.String() + k); ok { - currentSchema.refSchema = sch - - } else { - err := d.parseReference(documentNode, currentSchema, k) - if err != nil { - return err - } - - return nil - } - } - - // definitions - if existsMapKey(m, KEY_DEFINITIONS) { - if isKind(m[KEY_DEFINITIONS], reflect.Map) { - currentSchema.definitions = make(map[string]*subSchema) - for dk, dv := range m[KEY_DEFINITIONS].(map[string]interface{}) { - if isKind(dv, reflect.Map) { - newSchema := &subSchema{property: KEY_DEFINITIONS, parent: currentSchema, ref: currentSchema.ref} - currentSchema.definitions[dk] = newSchema - err := d.parseSchema(dv, newSchema) - if err != nil { - return errors.New(err.Error()) - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_ARRAY_OF_SCHEMAS, - "given": KEY_DEFINITIONS, - }, - )) - } - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_ARRAY_OF_SCHEMAS, - "given": KEY_DEFINITIONS, - }, - )) - } - - } - - // id - if existsMapKey(m, KEY_ID) && !isKind(m[KEY_ID], reflect.String) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING, - "given": KEY_ID, - }, - )) - } - if k, ok := m[KEY_ID].(string); ok { - currentSchema.id = &k - } - - // title - if existsMapKey(m, KEY_TITLE) && !isKind(m[KEY_TITLE], reflect.String) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING, - "given": KEY_TITLE, - }, - )) - } - if k, ok := m[KEY_TITLE].(string); ok { - currentSchema.title = &k - } - - // description - if existsMapKey(m, KEY_DESCRIPTION) && !isKind(m[KEY_DESCRIPTION], reflect.String) { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - 
"expected": TYPE_STRING, - "given": KEY_DESCRIPTION, - }, - )) - } - if k, ok := m[KEY_DESCRIPTION].(string); ok { - currentSchema.description = &k - } - - // type - if existsMapKey(m, KEY_TYPE) { - if isKind(m[KEY_TYPE], reflect.String) { - if k, ok := m[KEY_TYPE].(string); ok { - err := currentSchema.types.Add(k) - if err != nil { - return err - } - } - } else { - if isKind(m[KEY_TYPE], reflect.Slice) { - arrayOfTypes := m[KEY_TYPE].([]interface{}) - for _, typeInArray := range arrayOfTypes { - if reflect.ValueOf(typeInArray).Kind() != reflect.String { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING + "/" + STRING_ARRAY_OF_STRINGS, - "given": KEY_TYPE, - }, - )) - } else { - currentSchema.types.Add(typeInArray.(string)) - } - } - - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_STRING + "/" + STRING_ARRAY_OF_STRINGS, - "given": KEY_TYPE, - }, - )) - } - } - } - - // properties - if existsMapKey(m, KEY_PROPERTIES) { - err := d.parseProperties(m[KEY_PROPERTIES], currentSchema) - if err != nil { - return err - } - } - - // additionalProperties - if existsMapKey(m, KEY_ADDITIONAL_PROPERTIES) { - if isKind(m[KEY_ADDITIONAL_PROPERTIES], reflect.Bool) { - currentSchema.additionalProperties = m[KEY_ADDITIONAL_PROPERTIES].(bool) - } else if isKind(m[KEY_ADDITIONAL_PROPERTIES], reflect.Map) { - newSchema := &subSchema{property: KEY_ADDITIONAL_PROPERTIES, parent: currentSchema, ref: currentSchema.ref} - currentSchema.additionalProperties = newSchema - err := d.parseSchema(m[KEY_ADDITIONAL_PROPERTIES], newSchema) - if err != nil { - return errors.New(err.Error()) - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_BOOLEAN + "/" + STRING_SCHEMA, - "given": KEY_ADDITIONAL_PROPERTIES, - }, - )) - } - } - - // patternProperties - if existsMapKey(m, KEY_PATTERN_PROPERTIES) { - if isKind(m[KEY_PATTERN_PROPERTIES], reflect.Map) { - patternPropertiesMap := m[KEY_PATTERN_PROPERTIES].(map[string]interface{}) - if len(patternPropertiesMap) > 0 { - currentSchema.patternProperties = make(map[string]*subSchema) - for k, v := range patternPropertiesMap { - _, err := regexp.MatchString(k, "") - if err != nil { - return errors.New(formatErrorDescription( - Locale.RegexPattern(), - ErrorDetails{"pattern": k}, - )) - } - newSchema := &subSchema{property: k, parent: currentSchema, ref: currentSchema.ref} - err = d.parseSchema(v, newSchema) - if err != nil { - return errors.New(err.Error()) - } - currentSchema.patternProperties[k] = newSchema - } - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_SCHEMA, - "given": KEY_PATTERN_PROPERTIES, - }, - )) - } - } - - // dependencies - if existsMapKey(m, KEY_DEPENDENCIES) { - err := d.parseDependencies(m[KEY_DEPENDENCIES], currentSchema) - if err != nil { - return err - } - } - - // items - if existsMapKey(m, KEY_ITEMS) { - if isKind(m[KEY_ITEMS], reflect.Slice) { - for _, itemElement := range m[KEY_ITEMS].([]interface{}) { - if isKind(itemElement, reflect.Map) { - newSchema := &subSchema{parent: currentSchema, property: KEY_ITEMS} - newSchema.ref = currentSchema.ref - currentSchema.AddItemsChild(newSchema) - err := d.parseSchema(itemElement, newSchema) - if err != nil { - return err - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_SCHEMA + "/" + 
STRING_ARRAY_OF_SCHEMAS, - "given": KEY_ITEMS, - }, - )) - } - currentSchema.itemsChildrenIsSingleSchema = false - } - } else if isKind(m[KEY_ITEMS], reflect.Map) { - newSchema := &subSchema{parent: currentSchema, property: KEY_ITEMS} - newSchema.ref = currentSchema.ref - currentSchema.AddItemsChild(newSchema) - err := d.parseSchema(m[KEY_ITEMS], newSchema) - if err != nil { - return err - } - currentSchema.itemsChildrenIsSingleSchema = true - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_SCHEMA + "/" + STRING_ARRAY_OF_SCHEMAS, - "given": KEY_ITEMS, - }, - )) - } - } - - // additionalItems - if existsMapKey(m, KEY_ADDITIONAL_ITEMS) { - if isKind(m[KEY_ADDITIONAL_ITEMS], reflect.Bool) { - currentSchema.additionalItems = m[KEY_ADDITIONAL_ITEMS].(bool) - } else if isKind(m[KEY_ADDITIONAL_ITEMS], reflect.Map) { - newSchema := &subSchema{property: KEY_ADDITIONAL_ITEMS, parent: currentSchema, ref: currentSchema.ref} - currentSchema.additionalItems = newSchema - err := d.parseSchema(m[KEY_ADDITIONAL_ITEMS], newSchema) - if err != nil { - return errors.New(err.Error()) - } - } else { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": TYPE_BOOLEAN + "/" + STRING_SCHEMA, - "given": KEY_ADDITIONAL_ITEMS, - }, - )) - } - } - - // validation : number / integer - - if existsMapKey(m, KEY_MULTIPLE_OF) { - multipleOfValue := mustBeNumber(m[KEY_MULTIPLE_OF]) - if multipleOfValue == nil { - return errors.New(formatErrorDescription( - Locale.InvalidType(), - ErrorDetails{ - "expected": STRING_NUMBER, - "given": KEY_MULTIPLE_OF, - }, - )) - } - if *multipleOfValue <= 0 { - return errors.New(formatErrorDescription( - Locale.GreaterThanZero(), - ErrorDetails{"number": KEY_MULTIPLE_OF}, - )) - } - currentSchema.multipleOf = multipleOfValue - } - - if existsMapKey(m, KEY_MINIMUM) { - minimumValue := mustBeNumber(m[KEY_MINIMUM]) - if minimumValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_MINIMUM, "y": STRING_NUMBER}, - )) - } - currentSchema.minimum = minimumValue - } - - if existsMapKey(m, KEY_EXCLUSIVE_MINIMUM) { - if isKind(m[KEY_EXCLUSIVE_MINIMUM], reflect.Bool) { - if currentSchema.minimum == nil { - return errors.New(formatErrorDescription( - Locale.CannotBeUsedWithout(), - ErrorDetails{"x": KEY_EXCLUSIVE_MINIMUM, "y": KEY_MINIMUM}, - )) - } - exclusiveMinimumValue := m[KEY_EXCLUSIVE_MINIMUM].(bool) - currentSchema.exclusiveMinimum = exclusiveMinimumValue - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_EXCLUSIVE_MINIMUM, "y": TYPE_BOOLEAN}, - )) - } - } - - if existsMapKey(m, KEY_MAXIMUM) { - maximumValue := mustBeNumber(m[KEY_MAXIMUM]) - if maximumValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_MAXIMUM, "y": STRING_NUMBER}, - )) - } - currentSchema.maximum = maximumValue - } - - if existsMapKey(m, KEY_EXCLUSIVE_MAXIMUM) { - if isKind(m[KEY_EXCLUSIVE_MAXIMUM], reflect.Bool) { - if currentSchema.maximum == nil { - return errors.New(formatErrorDescription( - Locale.CannotBeUsedWithout(), - ErrorDetails{"x": KEY_EXCLUSIVE_MAXIMUM, "y": KEY_MAXIMUM}, - )) - } - exclusiveMaximumValue := m[KEY_EXCLUSIVE_MAXIMUM].(bool) - currentSchema.exclusiveMaximum = exclusiveMaximumValue - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_EXCLUSIVE_MAXIMUM, "y": STRING_NUMBER}, - )) - } - } - - if 
currentSchema.minimum != nil && currentSchema.maximum != nil { - if *currentSchema.minimum > *currentSchema.maximum { - return errors.New(formatErrorDescription( - Locale.CannotBeGT(), - ErrorDetails{"x": KEY_MINIMUM, "y": KEY_MAXIMUM}, - )) - } - } - - // validation : string - - if existsMapKey(m, KEY_MIN_LENGTH) { - minLengthIntegerValue := mustBeInteger(m[KEY_MIN_LENGTH]) - if minLengthIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MIN_LENGTH, "y": TYPE_INTEGER}, - )) - } - if *minLengthIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MIN_LENGTH}, - )) - } - currentSchema.minLength = minLengthIntegerValue - } - - if existsMapKey(m, KEY_MAX_LENGTH) { - maxLengthIntegerValue := mustBeInteger(m[KEY_MAX_LENGTH]) - if maxLengthIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MAX_LENGTH, "y": TYPE_INTEGER}, - )) - } - if *maxLengthIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MAX_LENGTH}, - )) - } - currentSchema.maxLength = maxLengthIntegerValue - } - - if currentSchema.minLength != nil && currentSchema.maxLength != nil { - if *currentSchema.minLength > *currentSchema.maxLength { - return errors.New(formatErrorDescription( - Locale.CannotBeGT(), - ErrorDetails{"x": KEY_MIN_LENGTH, "y": KEY_MAX_LENGTH}, - )) - } - } - - if existsMapKey(m, KEY_MIN_NUMERIC) { - minNumericIntegerValue := mustBeInteger(m[KEY_MIN_NUMERIC]) - if minNumericIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MIN_NUMERIC, "y": TYPE_INTEGER}, - )) - } - if *minNumericIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MIN_NUMERIC}, - )) - } - currentSchema.minNumeric = minNumericIntegerValue - } - - if existsMapKey(m, KEY_MIN_SPECIAL) { - minSpecialIntegerValue := mustBeInteger(m[KEY_MIN_SPECIAL]) - if minSpecialIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MIN_SPECIAL, "y": TYPE_INTEGER}, - )) - } - if *minSpecialIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MIN_SPECIAL}, - )) - } - currentSchema.minSpecial = minSpecialIntegerValue - } - - if existsMapKey(m, KEY_MULTI_CASE) { - if isKind(m[KEY_MULTI_CASE], reflect.Bool) { - currentSchema.multiCase = m[KEY_MULTI_CASE].(bool) - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_MULTI_CASE, "y": TYPE_BOOLEAN}, - )) - } - } - - if existsMapKey(m, KEY_DISABLE_SEQUENTIAL) { - if isKind(m[KEY_DISABLE_SEQUENTIAL], reflect.Bool) { - currentSchema.disableSequential = m[KEY_DISABLE_SEQUENTIAL].(bool) - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_DISABLE_SEQUENTIAL, "y": TYPE_BOOLEAN}, - )) - } - } - - if existsMapKey(m, KEY_PATTERN) { - if isKind(m[KEY_PATTERN], reflect.String) { - regexpObject, err := regexp.Compile(m[KEY_PATTERN].(string)) - if err != nil { - return errors.New(formatErrorDescription( - Locale.MustBeValidRegex(), - ErrorDetails{"key": KEY_PATTERN}, - )) - } - currentSchema.pattern = regexpObject - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_PATTERN, "y": TYPE_STRING}, - )) - } - } - - if 
existsMapKey(m, KEY_FORMAT) { - formatString, ok := m[KEY_FORMAT].(string) - if ok && FormatCheckers.Has(formatString) { - currentSchema.format = formatString - } else { - return errors.New(formatErrorDescription( - Locale.MustBeValidFormat(), - ErrorDetails{"key": KEY_FORMAT, "given": m[KEY_FORMAT]}, - )) - } - } - - // validation : object - - if existsMapKey(m, KEY_MIN_PROPERTIES) { - minPropertiesIntegerValue := mustBeInteger(m[KEY_MIN_PROPERTIES]) - if minPropertiesIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MIN_PROPERTIES, "y": TYPE_INTEGER}, - )) - } - if *minPropertiesIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MIN_PROPERTIES}, - )) - } - currentSchema.minProperties = minPropertiesIntegerValue - } - - if existsMapKey(m, KEY_MAX_PROPERTIES) { - maxPropertiesIntegerValue := mustBeInteger(m[KEY_MAX_PROPERTIES]) - if maxPropertiesIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MAX_PROPERTIES, "y": TYPE_INTEGER}, - )) - } - if *maxPropertiesIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MAX_PROPERTIES}, - )) - } - currentSchema.maxProperties = maxPropertiesIntegerValue - } - - if currentSchema.minProperties != nil && currentSchema.maxProperties != nil { - if *currentSchema.minProperties > *currentSchema.maxProperties { - return errors.New(formatErrorDescription( - Locale.KeyCannotBeGreaterThan(), - ErrorDetails{"key": KEY_MIN_PROPERTIES, "y": KEY_MAX_PROPERTIES}, - )) - } - } - - if existsMapKey(m, KEY_REQUIRED) { - if isKind(m[KEY_REQUIRED], reflect.Slice) { - requiredValues := m[KEY_REQUIRED].([]interface{}) - for _, requiredValue := range requiredValues { - if isKind(requiredValue, reflect.String) { - err := currentSchema.AddRequired(requiredValue.(string)) - if err != nil { - return err - } - } else { - return errors.New(formatErrorDescription( - Locale.KeyItemsMustBeOfType(), - ErrorDetails{"key": KEY_REQUIRED, "type": TYPE_STRING}, - )) - } - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_REQUIRED, "y": TYPE_ARRAY}, - )) - } - } - - // validation : array - - if existsMapKey(m, KEY_MIN_ITEMS) { - minItemsIntegerValue := mustBeInteger(m[KEY_MIN_ITEMS]) - if minItemsIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MIN_ITEMS, "y": TYPE_INTEGER}, - )) - } - if *minItemsIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MIN_ITEMS}, - )) - } - currentSchema.minItems = minItemsIntegerValue - } - - if existsMapKey(m, KEY_MAX_ITEMS) { - maxItemsIntegerValue := mustBeInteger(m[KEY_MAX_ITEMS]) - if maxItemsIntegerValue == nil { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_MAX_ITEMS, "y": TYPE_INTEGER}, - )) - } - if *maxItemsIntegerValue < 0 { - return errors.New(formatErrorDescription( - Locale.MustBeGTEZero(), - ErrorDetails{"key": KEY_MAX_ITEMS}, - )) - } - currentSchema.maxItems = maxItemsIntegerValue - } - - if existsMapKey(m, KEY_UNIQUE_ITEMS) { - if isKind(m[KEY_UNIQUE_ITEMS], reflect.Bool) { - currentSchema.uniqueItems = m[KEY_UNIQUE_ITEMS].(bool) - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfA(), - ErrorDetails{"x": KEY_UNIQUE_ITEMS, "y": TYPE_BOOLEAN}, - )) - } - 
} - - // validation : all - - if existsMapKey(m, KEY_ENUM) { - if isKind(m[KEY_ENUM], reflect.Slice) { - for _, v := range m[KEY_ENUM].([]interface{}) { - err := currentSchema.AddEnum(v) - if err != nil { - return err - } - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_ENUM, "y": TYPE_ARRAY}, - )) - } - } - - // validation : subSchema - - if existsMapKey(m, KEY_ONE_OF) { - if isKind(m[KEY_ONE_OF], reflect.Slice) { - for _, v := range m[KEY_ONE_OF].([]interface{}) { - newSchema := &subSchema{property: KEY_ONE_OF, parent: currentSchema, ref: currentSchema.ref} - currentSchema.AddOneOf(newSchema) - err := d.parseSchema(v, newSchema) - if err != nil { - return err - } - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_ONE_OF, "y": TYPE_ARRAY}, - )) - } - } - - if existsMapKey(m, KEY_ANY_OF) { - if isKind(m[KEY_ANY_OF], reflect.Slice) { - for _, v := range m[KEY_ANY_OF].([]interface{}) { - newSchema := &subSchema{property: KEY_ANY_OF, parent: currentSchema, ref: currentSchema.ref} - currentSchema.AddAnyOf(newSchema) - err := d.parseSchema(v, newSchema) - if err != nil { - return err - } - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_ANY_OF, "y": TYPE_ARRAY}, - )) - } - } - - if existsMapKey(m, KEY_ALL_OF) { - if isKind(m[KEY_ALL_OF], reflect.Slice) { - for _, v := range m[KEY_ALL_OF].([]interface{}) { - newSchema := &subSchema{property: KEY_ALL_OF, parent: currentSchema, ref: currentSchema.ref} - currentSchema.AddAllOf(newSchema) - err := d.parseSchema(v, newSchema) - if err != nil { - return err - } - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_ANY_OF, "y": TYPE_ARRAY}, - )) - } - } - - if existsMapKey(m, KEY_NOT) { - if isKind(m[KEY_NOT], reflect.Map) { - newSchema := &subSchema{property: KEY_NOT, parent: currentSchema, ref: currentSchema.ref} - currentSchema.SetNot(newSchema) - err := d.parseSchema(m[KEY_NOT], newSchema) - if err != nil { - return err - } - } else { - return errors.New(formatErrorDescription( - Locale.MustBeOfAn(), - ErrorDetails{"x": KEY_NOT, "y": TYPE_OBJECT}, - )) - } - } - - return nil -} - -func (d *Schema) parseReference(documentNode interface{}, currentSchema *subSchema, reference string) error { - var refdDocumentNode interface{} - jsonPointer := currentSchema.ref.GetPointer() - standaloneDocument := d.pool.GetStandaloneDocument() - - if standaloneDocument != nil { - - var err error - refdDocumentNode, _, err = jsonPointer.Get(standaloneDocument) - if err != nil { - return err - } - - } else { - dsp, err := d.pool.GetDocument(*currentSchema.ref) - if err != nil { - return err - } - - refdDocumentNode, _, err = jsonPointer.Get(dsp.Document) - if err != nil { - return err - } - - } - - if !isKind(refdDocumentNode, reflect.Map) { - return errors.New(formatErrorDescription( - Locale.MustBeOfType(), - ErrorDetails{"key": STRING_SCHEMA, "type": TYPE_OBJECT}, - )) - } - - // returns the loaded referenced subSchema for the caller to update its current subSchema - newSchemaDocument := refdDocumentNode.(map[string]interface{}) - newSchema := &subSchema{property: KEY_REF, parent: currentSchema, ref: currentSchema.ref} - d.referencePool.Add(currentSchema.ref.String()+reference, newSchema) - - err := d.parseSchema(newSchemaDocument, newSchema) - if err != nil { - return err - } - - currentSchema.refSchema = newSchema - - return nil - -} - -func (d 
*Schema) parseProperties(documentNode interface{}, currentSchema *subSchema) error { - - if !isKind(documentNode, reflect.Map) { - return errors.New(formatErrorDescription( - Locale.MustBeOfType(), - ErrorDetails{"key": STRING_PROPERTIES, "type": TYPE_OBJECT}, - )) - } - - m := documentNode.(map[string]interface{}) - for k := range m { - schemaProperty := k - newSchema := &subSchema{property: schemaProperty, parent: currentSchema, ref: currentSchema.ref} - currentSchema.AddPropertiesChild(newSchema) - err := d.parseSchema(m[k], newSchema) - if err != nil { - return err - } - } - - return nil -} - -func (d *Schema) parseDependencies(documentNode interface{}, currentSchema *subSchema) error { - - if !isKind(documentNode, reflect.Map) { - return errors.New(formatErrorDescription( - Locale.MustBeOfType(), - ErrorDetails{"key": KEY_DEPENDENCIES, "type": TYPE_OBJECT}, - )) - } - - m := documentNode.(map[string]interface{}) - currentSchema.dependencies = make(map[string]interface{}) - - for k := range m { - switch reflect.ValueOf(m[k]).Kind() { - - case reflect.Slice: - values := m[k].([]interface{}) - var valuesToRegister []string - - for _, value := range values { - if !isKind(value, reflect.String) { - return errors.New(formatErrorDescription( - Locale.MustBeOfType(), - ErrorDetails{ - "key": STRING_DEPENDENCY, - "type": STRING_SCHEMA_OR_ARRAY_OF_STRINGS, - }, - )) - } else { - valuesToRegister = append(valuesToRegister, value.(string)) - } - currentSchema.dependencies[k] = valuesToRegister - } - - case reflect.Map: - depSchema := &subSchema{property: k, parent: currentSchema, ref: currentSchema.ref} - err := d.parseSchema(m[k], depSchema) - if err != nil { - return err - } - currentSchema.dependencies[k] = depSchema - - default: - return errors.New(formatErrorDescription( - Locale.MustBeOfType(), - ErrorDetails{ - "key": STRING_DEPENDENCY, - "type": STRING_SCHEMA_OR_ARRAY_OF_STRINGS, - }, - )) - } - - } - - return nil -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/schemaPool.go b/vendor/github.com/TykTechnologies/gojsonschema/schemaPool.go deleted file mode 100644 index f2ad641af3c..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/schemaPool.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Defines resources pooling. -// Eases referencing and avoids downloading the same resource twice. 
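The schemaPool being deleted below caches loaded documents under their reference URL with the fragment stripped, so repeated $ref lookups into one remote schema trigger a single download. This is an illustrative stand-in for that idea, not the library's own code; all names here are hypothetical:

package main

import (
	"fmt"
	"net/url"
)

// docCache mimics the pooling behaviour: documents are cached by canonical
// URL, so "#/definitions/a" and "#/definitions/b" share one fetch.
type docCache struct {
	docs map[string]interface{}
	load func(canonical string) (interface{}, error)
}

func (c *docCache) get(ref string) (interface{}, error) {
	u, err := url.Parse(ref)
	if err != nil {
		return nil, err
	}
	u.Fragment = "" // the fragment only selects a node inside the document
	key := u.String()

	if doc, ok := c.docs[key]; ok {
		return doc, nil
	}
	doc, err := c.load(key)
	if err != nil {
		return nil, err
	}
	c.docs[key] = doc
	return doc, nil
}

func main() {
	calls := 0
	cache := &docCache{
		docs: map[string]interface{}{},
		load: func(canonical string) (interface{}, error) {
			calls++
			return map[string]interface{}{"loaded": canonical}, nil
		},
	}
	cache.get("http://example.com/schema.json#/definitions/a")
	cache.get("http://example.com/schema.json#/definitions/b")
	fmt.Println("loader invoked", calls, "time(s)") // prints 1
}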
-// -// created 26-02-2013 - -package gojsonschema - -import ( - "errors" - - "github.com/xeipuuv/gojsonreference" -) - -type schemaPoolDocument struct { - Document interface{} -} - -type schemaPool struct { - schemaPoolDocuments map[string]*schemaPoolDocument - standaloneDocument interface{} - jsonLoaderFactory JSONLoaderFactory -} - -func newSchemaPool(f JSONLoaderFactory) *schemaPool { - - p := &schemaPool{} - p.schemaPoolDocuments = make(map[string]*schemaPoolDocument) - p.standaloneDocument = nil - p.jsonLoaderFactory = f - - return p -} - -func (p *schemaPool) SetStandaloneDocument(document interface{}) { - p.standaloneDocument = document -} - -func (p *schemaPool) GetStandaloneDocument() (document interface{}) { - return p.standaloneDocument -} - -func (p *schemaPool) GetDocument(reference gojsonreference.JsonReference) (*schemaPoolDocument, error) { - - if internalLogEnabled { - internalLog("Get Document ( %s )", reference.String()) - } - - var err error - - // It is not possible to load anything that is not canonical... - if !reference.IsCanonical() { - return nil, errors.New(formatErrorDescription( - Locale.ReferenceMustBeCanonical(), - ErrorDetails{"reference": reference}, - )) - } - - refToUrl := reference - refToUrl.GetUrl().Fragment = "" - - var spd *schemaPoolDocument - - // Try to find the requested document in the pool - for k := range p.schemaPoolDocuments { - if k == refToUrl.String() { - spd = p.schemaPoolDocuments[k] - } - } - - if spd != nil { - if internalLogEnabled { - internalLog(" From pool") - } - return spd, nil - } - - jsonReferenceLoader := p.jsonLoaderFactory.New(reference.String()) - document, err := jsonReferenceLoader.LoadJSON() - if err != nil { - return nil, err - } - - spd = &schemaPoolDocument{Document: document} - // add the document to the pool for potential later use - p.schemaPoolDocuments[refToUrl.String()] = spd - - return spd, nil -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/schemaReferencePool.go b/vendor/github.com/TykTechnologies/gojsonschema/schemaReferencePool.go deleted file mode 100644 index 294e36a732a..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/schemaReferencePool.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Pool of referenced schemas. 
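The schemaReferencePool removal that starts just above lets a $ref target be parsed once and reused wherever the same reference appears. From the caller's side that machinery is invisible, so the sketch below simply shows a definitions/$ref schema resolving through the public API (schema and document literals are illustrative):

package main

import (
	"fmt"

	"github.com/TykTechnologies/gojsonschema"
)

func main() {
	// Both properties point at the same definition; the parsed sub-schema
	// for #/definitions/port is reused via the reference pool.
	schemaLoader := gojsonschema.NewStringLoader(`{
		"definitions": {"port": {"type": "integer", "minimum": 1, "maximum": 65535}},
		"type": "object",
		"properties": {
			"listen_port": {"$ref": "#/definitions/port"},
			"target_port": {"$ref": "#/definitions/port"}
		}
	}`)
	documentLoader := gojsonschema.NewStringLoader(`{"listen_port": 8080, "target_port": 70000}`)

	res, err := gojsonschema.Validate(schemaLoader, documentLoader)
	if err != nil {
		panic(err)
	}
	fmt.Println("valid:", res.Valid()) // false: target_port exceeds the maximum
	for _, e := range res.Errors() {
		fmt.Println(e.Field(), e.Description())
	}
}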
-// -// created 25-06-2013 - -package gojsonschema - -import ( - "fmt" -) - -type schemaReferencePool struct { - documents map[string]*subSchema -} - -func newSchemaReferencePool() *schemaReferencePool { - - p := &schemaReferencePool{} - p.documents = make(map[string]*subSchema) - - return p -} - -func (p *schemaReferencePool) Get(ref string) (r *subSchema, o bool) { - - if internalLogEnabled { - internalLog(fmt.Sprintf("Schema Reference ( %s )", ref)) - } - - if sch, ok := p.documents[ref]; ok { - if internalLogEnabled { - internalLog(fmt.Sprintf(" From pool")) - } - return sch, true - } - - return nil, false -} - -func (p *schemaReferencePool) Add(ref string, sch *subSchema) { - - if internalLogEnabled { - internalLog(fmt.Sprintf("Add Schema Reference %s to pool", ref)) - } - - p.documents[ref] = sch -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/schemaType.go b/vendor/github.com/TykTechnologies/gojsonschema/schemaType.go deleted file mode 100644 index e13a0fb0cbf..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/schemaType.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Helper structure to handle schema types, and the combination of them. -// -// created 28-02-2013 - -package gojsonschema - -import ( - "errors" - "fmt" - "strings" -) - -type jsonSchemaType struct { - types []string -} - -// Is the schema typed ? that is containing at least one type -// When not typed, the schema does not need any type validation -func (t *jsonSchemaType) IsTyped() bool { - return len(t.types) > 0 -} - -func (t *jsonSchemaType) Add(etype string) error { - - if !isStringInSlice(JSON_TYPES, etype) { - return errors.New(formatErrorDescription(Locale.NotAValidType(), ErrorDetails{"type": etype})) - } - - if t.Contains(etype) { - return errors.New(formatErrorDescription(Locale.Duplicated(), ErrorDetails{"type": etype})) - } - - t.types = append(t.types, etype) - - return nil -} - -func (t *jsonSchemaType) Contains(etype string) bool { - - for _, v := range t.types { - if v == etype { - return true - } - } - - return false -} - -func (t *jsonSchemaType) String() string { - - if len(t.types) == 0 { - return STRING_UNDEFINED // should never happen - } - - // Displayed as a list [type1,type2,...] 
- if len(t.types) > 1 { - return fmt.Sprintf("[%s]", strings.Join(t.types, ",")) - } - - // Only one type: name only - return t.types[0] -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/subSchema.go b/vendor/github.com/TykTechnologies/gojsonschema/subSchema.go deleted file mode 100644 index 75abc5f5262..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/subSchema.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Defines the structure of a sub-subSchema. -// A sub-subSchema can contain other sub-schemas. -// -// created 27-02-2013 - -package gojsonschema - -import ( - "errors" - "regexp" - "strings" - - "github.com/xeipuuv/gojsonreference" -) - -const ( - KEY_SCHEMA = "$subSchema" - KEY_ID = "$id" - KEY_REF = "$ref" - KEY_TITLE = "title" - KEY_DESCRIPTION = "description" - KEY_TYPE = "type" - KEY_ITEMS = "items" - KEY_ADDITIONAL_ITEMS = "additionalItems" - KEY_PROPERTIES = "properties" - KEY_PATTERN_PROPERTIES = "patternProperties" - KEY_ADDITIONAL_PROPERTIES = "additionalProperties" - KEY_DEFINITIONS = "definitions" - KEY_MULTIPLE_OF = "multipleOf" - KEY_MINIMUM = "minimum" - KEY_MAXIMUM = "maximum" - KEY_EXCLUSIVE_MINIMUM = "exclusiveMinimum" - KEY_EXCLUSIVE_MAXIMUM = "exclusiveMaximum" - KEY_MIN_LENGTH = "minLength" - KEY_MAX_LENGTH = "maxLength" - KEY_MIN_NUMERIC = "minNumeric" - KEY_MIN_SPECIAL = "minSpecial" - KEY_MULTI_CASE = "multiCase" - KEY_DISABLE_SEQUENTIAL = "disableSequential" - KEY_PATTERN = "pattern" - KEY_FORMAT = "format" - KEY_MIN_PROPERTIES = "minProperties" - KEY_MAX_PROPERTIES = "maxProperties" - KEY_DEPENDENCIES = "dependencies" - KEY_REQUIRED = "required" - KEY_MIN_ITEMS = "minItems" - KEY_MAX_ITEMS = "maxItems" - KEY_UNIQUE_ITEMS = "uniqueItems" - KEY_ENUM = "enum" - KEY_ONE_OF = "oneOf" - KEY_ANY_OF = "anyOf" - KEY_ALL_OF = "allOf" - KEY_NOT = "not" -) - -type subSchema struct { - - // basic subSchema meta properties - id *string - title *string - description *string - - property string - - // Types associated with the subSchema - types jsonSchemaType - - // Reference url - ref *gojsonreference.JsonReference - // Schema referenced - refSchema *subSchema - // Json reference - subSchema *gojsonreference.JsonReference - - // hierarchy - parent *subSchema - definitions map[string]*subSchema - definitionsChildren []*subSchema - itemsChildren []*subSchema - itemsChildrenIsSingleSchema bool - propertiesChildren []*subSchema - - // validation : number / integer - multipleOf *float64 - maximum *float64 - exclusiveMaximum bool - minimum *float64 - exclusiveMinimum bool - - // validation : string - minLength *int - maxLength *int - minNumeric *int - minSpecial *int - multiCase bool - disableSequential 
bool - pattern *regexp.Regexp - format string - - // validation : object - minProperties *int - maxProperties *int - required []string - - dependencies map[string]interface{} - additionalProperties interface{} - patternProperties map[string]*subSchema - - // validation : array - minItems *int - maxItems *int - uniqueItems bool - - additionalItems interface{} - - // validation : all - enum []string - - // validation : subSchema - oneOf []*subSchema - anyOf []*subSchema - allOf []*subSchema - not *subSchema -} - -func (s *subSchema) AddEnum(i interface{}) error { - - is, err := marshalToJsonString(i) - if err != nil { - return err - } - - if isStringInSlice(s.enum, *is) { - return errors.New(formatErrorDescription( - Locale.KeyItemsMustBeUnique(), - ErrorDetails{"key": KEY_ENUM}, - )) - } - - s.enum = append(s.enum, *is) - - return nil -} - -func (s *subSchema) ContainsEnum(i interface{}) (bool, error) { - - is, err := marshalToJsonString(i) - if err != nil { - return false, err - } - - return isStringInSlice(s.enum, *is), nil -} - -func (s *subSchema) AddOneOf(subSchema *subSchema) { - s.oneOf = append(s.oneOf, subSchema) -} - -func (s *subSchema) AddAllOf(subSchema *subSchema) { - s.allOf = append(s.allOf, subSchema) -} - -func (s *subSchema) AddAnyOf(subSchema *subSchema) { - s.anyOf = append(s.anyOf, subSchema) -} - -func (s *subSchema) SetNot(subSchema *subSchema) { - s.not = subSchema -} - -func (s *subSchema) AddRequired(value string) error { - - if isStringInSlice(s.required, value) { - return errors.New(formatErrorDescription( - Locale.KeyItemsMustBeUnique(), - ErrorDetails{"key": KEY_REQUIRED}, - )) - } - - s.required = append(s.required, value) - - return nil -} - -func (s *subSchema) AddDefinitionChild(child *subSchema) { - s.definitionsChildren = append(s.definitionsChildren, child) -} - -func (s *subSchema) AddItemsChild(child *subSchema) { - s.itemsChildren = append(s.itemsChildren, child) -} - -func (s *subSchema) AddPropertiesChild(child *subSchema) { - s.propertiesChildren = append(s.propertiesChildren, child) -} - -func (s *subSchema) PatternPropertiesString() string { - - if s.patternProperties == nil || len(s.patternProperties) == 0 { - return STRING_UNDEFINED // should never happen - } - - patternPropertiesKeySlice := []string{} - for pk := range s.patternProperties { - patternPropertiesKeySlice = append(patternPropertiesKeySlice, `"`+pk+`"`) - } - - if len(patternPropertiesKeySlice) == 1 { - return patternPropertiesKeySlice[0] - } - - return "[" + strings.Join(patternPropertiesKeySlice, ",") + "]" - -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/types.go b/vendor/github.com/TykTechnologies/gojsonschema/types.go deleted file mode 100644 index 952d22ef65e..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/types.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
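types.go, removed next, defines the JSON type names, including "null" as a type in its own right. A small sketch of what that means for multi-typed properties, using the public API (literals illustrative):

package main

import (
	"fmt"

	"github.com/TykTechnologies/gojsonschema"
)

func main() {
	// A property that may legitimately be null must list "null" alongside
	// its other type; otherwise a null value fails type validation.
	schemaLoader := gojsonschema.NewStringLoader(`{
		"type": "object",
		"properties": {"nickname": {"type": ["string", "null"]}}
	}`)

	for _, doc := range []string{
		`{"nickname": "gw"}`,
		`{"nickname": null}`,
		`{"nickname": 3}`,
	} {
		res, err := gojsonschema.Validate(schemaLoader, gojsonschema.NewStringLoader(doc))
		if err != nil {
			panic(err)
		}
		fmt.Println(doc, "valid:", res.Valid()) // true, true, false across the three documents
	}
}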
- -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Contains const types for schema and JSON. -// -// created 28-02-2013 - -package gojsonschema - -const ( - TYPE_ARRAY = `array` - TYPE_BOOLEAN = `boolean` - TYPE_INTEGER = `integer` - TYPE_NUMBER = `number` - TYPE_NULL = `null` - TYPE_OBJECT = `object` - TYPE_STRING = `string` -) - -var JSON_TYPES []string -var SCHEMA_TYPES []string - -func init() { - JSON_TYPES = []string{ - TYPE_ARRAY, - TYPE_BOOLEAN, - TYPE_INTEGER, - TYPE_NUMBER, - TYPE_NULL, - TYPE_OBJECT, - TYPE_STRING} - - SCHEMA_TYPES = []string{ - TYPE_ARRAY, - TYPE_BOOLEAN, - TYPE_INTEGER, - TYPE_NUMBER, - TYPE_OBJECT, - TYPE_STRING} -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/utils.go b/vendor/github.com/TykTechnologies/gojsonschema/utils.go deleted file mode 100644 index 26cf75ebf79..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/utils.go +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Various utility functions. -// -// created 26-02-2013 - -package gojsonschema - -import ( - "encoding/json" - "fmt" - "math" - "reflect" - "strconv" -) - -func isKind(what interface{}, kind reflect.Kind) bool { - target := what - if isJsonNumber(what) { - // JSON Numbers are strings! 
- target = *mustBeNumber(what) - } - return reflect.ValueOf(target).Kind() == kind -} - -func existsMapKey(m map[string]interface{}, k string) bool { - _, ok := m[k] - return ok -} - -func isStringInSlice(s []string, what string) bool { - for i := range s { - if s[i] == what { - return true - } - } - return false -} - -func marshalToJsonString(value interface{}) (*string, error) { - - mBytes, err := json.Marshal(value) - if err != nil { - return nil, err - } - - sBytes := string(mBytes) - return &sBytes, nil -} - -func isJsonNumber(what interface{}) bool { - - switch what.(type) { - - case json.Number: - return true - } - - return false -} - -func checkJsonNumber(what interface{}) (isValidFloat64 bool, isValidInt64 bool, isValidInt32 bool) { - - jsonNumber := what.(json.Number) - - f64, errFloat64 := jsonNumber.Float64() - s64 := strconv.FormatFloat(f64, 'f', -1, 64) - _, errInt64 := strconv.ParseInt(s64, 10, 64) - - isValidFloat64 = errFloat64 == nil - isValidInt64 = errInt64 == nil - - _, errInt32 := strconv.ParseInt(s64, 10, 32) - isValidInt32 = isValidInt64 && errInt32 == nil - - return - -} - -// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER -const ( - max_json_float = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1 - min_json_float = -float64(1<<53 - 1) //-9007199254740991.0 -2^53 - 1 -) - -func isFloat64AnInteger(f float64) bool { - - if math.IsNaN(f) || math.IsInf(f, 0) || f < min_json_float || f > max_json_float { - return false - } - - return f == float64(int64(f)) || f == float64(uint64(f)) -} - -func mustBeInteger(what interface{}) *int { - - if isJsonNumber(what) { - - number := what.(json.Number) - - _, _, isValidInt32 := checkJsonNumber(number) - - if isValidInt32 { - - int64Value, err := number.Int64() - if err != nil { - return nil - } - - int32Value := int(int64Value) - return &int32Value - - } else { - return nil - } - - } - - return nil -} - -func mustBeNumber(what interface{}) *float64 { - - if isJsonNumber(what) { - - number := what.(json.Number) - float64Value, err := number.Float64() - - if err == nil { - return &float64Value - } else { - return nil - } - - } - - return nil - -} - -// formats a number so that it is displayed as the smallest string possible -func resultErrorFormatJsonNumber(n json.Number) string { - - if int64Value, err := n.Int64(); err == nil { - return fmt.Sprintf("%d", int64Value) - } - - float64Value, _ := n.Float64() - - return fmt.Sprintf("%g", float64Value) -} - -// formats a number so that it is displayed as the smallest string possible -func resultErrorFormatNumber(n float64) string { - - if isFloat64AnInteger(n) { - return fmt.Sprintf("%d", int64(n)) - } - - return fmt.Sprintf("%g", n) -} - -func convertDocumentNode(val interface{}) interface{} { - - if lval, ok := val.([]interface{}); ok { - - res := []interface{}{} - for _, v := range lval { - res = append(res, convertDocumentNode(v)) - } - - return res - - } - - if mval, ok := val.(map[interface{}]interface{}); ok { - - res := map[string]interface{}{} - - for k, v := range mval { - res[k.(string)] = convertDocumentNode(v) - } - - return res - - } - - return val -} diff --git a/vendor/github.com/TykTechnologies/gojsonschema/validation.go b/vendor/github.com/TykTechnologies/gojsonschema/validation.go deleted file mode 100644 index 597e0a1c7dc..00000000000 --- a/vendor/github.com/TykTechnologies/gojsonschema/validation.go +++ /dev/null @@ -1,901 +0,0 @@ -// Copyright 2015 xeipuuv ( https://github.com/xeipuuv ) -// -// Licensed under the Apache License, Version 2.0 (the 
"License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author xeipuuv -// author-github https://github.com/xeipuuv -// author-mail xeipuuv@gmail.com -// -// repository-name gojsonschema -// repository-desc An implementation of JSON Schema, based on IETF's draft v4 - Go language. -// -// description Extends Schema and subSchema, implements the validation phase. -// -// created 28-02-2013 - -package gojsonschema - -import ( - "encoding/json" - "reflect" - "regexp" - "strconv" - "strings" - "unicode/utf8" -) - -func Validate(ls JSONLoader, ld JSONLoader) (*Result, error) { - - var err error - - // load schema - - schema, err := NewSchema(ls) - if err != nil { - return nil, err - } - - // begine validation - - return schema.Validate(ld) - -} - -func (v *Schema) Validate(l JSONLoader) (*Result, error) { - - // load document - - root, err := l.LoadJSON() - if err != nil { - return nil, err - } - - // begin validation - - result := &Result{} - context := newJsonContext(STRING_CONTEXT_ROOT, nil) - v.rootSchema.validateRecursive(v.rootSchema, root, result, context) - - return result, nil - -} - -func (v *subSchema) subValidateWithContext(document interface{}, context *jsonContext) *Result { - result := &Result{} - v.validateRecursive(v, document, result, context) - return result -} - -// Walker function to validate the json recursively against the subSchema -func (v *subSchema) validateRecursive(currentSubSchema *subSchema, currentNode interface{}, result *Result, context *jsonContext) { - - if internalLogEnabled { - internalLog("validateRecursive %s", context.String()) - internalLog(" %v", currentNode) - } - - // Handle referenced schemas, returns directly when a $ref is found - if currentSubSchema.refSchema != nil { - v.validateRecursive(currentSubSchema.refSchema, currentNode, result, context) - return - } - - // Check for null value - if currentNode == nil { - if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_NULL) { - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": TYPE_NULL, - }, - ) - return - } - - currentSubSchema.validateSchema(currentSubSchema, currentNode, result, context) - v.validateCommon(currentSubSchema, currentNode, result, context) - - } else { // Not a null value - - if isJsonNumber(currentNode) { - - value := currentNode.(json.Number) - - _, isValidInt64, _ := checkJsonNumber(value) - - validType := currentSubSchema.types.Contains(TYPE_NUMBER) || (isValidInt64 && currentSubSchema.types.Contains(TYPE_INTEGER)) - - if currentSubSchema.types.IsTyped() && !validType { - - givenType := TYPE_INTEGER - if !isValidInt64 { - givenType = TYPE_NUMBER - } - - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": givenType, - }, - ) - return - } - - currentSubSchema.validateSchema(currentSubSchema, value, result, context) - v.validateNumber(currentSubSchema, value, result, context) - v.validateCommon(currentSubSchema, value, result, 
context) - v.validateString(currentSubSchema, value, result, context) - - } else { - - rValue := reflect.ValueOf(currentNode) - rKind := rValue.Kind() - - switch rKind { - - // Slice => JSON array - - case reflect.Slice: - - if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_ARRAY) { - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": TYPE_ARRAY, - }, - ) - return - } - - castCurrentNode := currentNode.([]interface{}) - - currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context) - - v.validateArray(currentSubSchema, castCurrentNode, result, context) - v.validateCommon(currentSubSchema, castCurrentNode, result, context) - - // Map => JSON object - - case reflect.Map: - if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_OBJECT) { - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": TYPE_OBJECT, - }, - ) - return - } - - castCurrentNode, ok := currentNode.(map[string]interface{}) - if !ok { - castCurrentNode = convertDocumentNode(currentNode).(map[string]interface{}) - } - - currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context) - - v.validateObject(currentSubSchema, castCurrentNode, result, context) - v.validateCommon(currentSubSchema, castCurrentNode, result, context) - - for _, pSchema := range currentSubSchema.propertiesChildren { - nextNode, ok := castCurrentNode[pSchema.property] - if ok { - subContext := newJsonContext(pSchema.property, context) - v.validateRecursive(pSchema, nextNode, result, subContext) - } - } - - // Simple JSON values : string, number, boolean - - case reflect.Bool: - - if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_BOOLEAN) { - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": TYPE_BOOLEAN, - }, - ) - return - } - - value := currentNode.(bool) - - currentSubSchema.validateSchema(currentSubSchema, value, result, context) - v.validateNumber(currentSubSchema, value, result, context) - v.validateCommon(currentSubSchema, value, result, context) - v.validateString(currentSubSchema, value, result, context) - - case reflect.String: - - if currentSubSchema.types.IsTyped() && !currentSubSchema.types.Contains(TYPE_STRING) { - result.addError( - new(InvalidTypeError), - context, - currentNode, - ErrorDetails{ - "expected": currentSubSchema.types.String(), - "given": TYPE_STRING, - }, - ) - return - } - - value := currentNode.(string) - - currentSubSchema.validateSchema(currentSubSchema, value, result, context) - v.validateNumber(currentSubSchema, value, result, context) - v.validateCommon(currentSubSchema, value, result, context) - v.validateString(currentSubSchema, value, result, context) - - } - - } - - } - - result.incrementScore() -} - -// Different kinds of validation there, subSchema / common / array / object / string... 
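(Editorial aside for readers of this deletion hunk: the entry points removed above, Validate and (*Schema).Validate, are driven through JSON loaders. A minimal usage sketch, assuming this fork keeps the upstream xeipuuv/gojsonschema loader constructor NewStringLoader and the Result.Errors() accessor; the schema and document literals are invented for illustration.)

```go
package main

import (
	"fmt"
	"log"

	"github.com/TykTechnologies/gojsonschema"
)

func main() {
	// NewStringLoader and Result.Errors() follow the upstream
	// xeipuuv/gojsonschema API; their presence in this fork is assumed.
	schemaLoader := gojsonschema.NewStringLoader(
		`{"type": "object", "required": ["name"], "properties": {"name": {"type": "string"}}}`)
	documentLoader := gojsonschema.NewStringLoader(`{"name": "my-api"}`)

	// Validate builds the schema, then walks the document recursively,
	// accumulating violations into a Result (see validateRecursive above).
	result, err := gojsonschema.Validate(schemaLoader, documentLoader)
	if err != nil {
		log.Fatalf("could not run validation: %s", err)
	}

	if result.Valid() {
		fmt.Println("document conforms to the schema")
		return
	}
	for _, e := range result.Errors() {
		fmt.Println("-", e)
	}
}
```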
-func (v *subSchema) validateSchema(currentSubSchema *subSchema, currentNode interface{}, result *Result, context *jsonContext) { - - if internalLogEnabled { - internalLog("validateSchema %s", context.String()) - internalLog(" %v", currentNode) - } - - if len(currentSubSchema.anyOf) > 0 { - - validatedAnyOf := false - var bestValidationResult *Result - - for _, anyOfSchema := range currentSubSchema.anyOf { - if !validatedAnyOf { - validationResult := anyOfSchema.subValidateWithContext(currentNode, context) - validatedAnyOf = validationResult.Valid() - - if !validatedAnyOf && (bestValidationResult == nil || validationResult.score > bestValidationResult.score) { - bestValidationResult = validationResult - } - } - } - if !validatedAnyOf { - - result.addError(new(NumberAnyOfError), context, currentNode, ErrorDetails{}) - - if bestValidationResult != nil { - // add error messages of closest matching subSchema as - // that's probably the one the user was trying to match - result.mergeErrors(bestValidationResult) - } - } - } - - if len(currentSubSchema.oneOf) > 0 { - - nbValidated := 0 - var bestValidationResult *Result - - for _, oneOfSchema := range currentSubSchema.oneOf { - validationResult := oneOfSchema.subValidateWithContext(currentNode, context) - if validationResult.Valid() { - nbValidated++ - } else if nbValidated == 0 && (bestValidationResult == nil || validationResult.score > bestValidationResult.score) { - bestValidationResult = validationResult - } - } - - if nbValidated != 1 { - - result.addError(new(NumberOneOfError), context, currentNode, ErrorDetails{}) - - if nbValidated == 0 { - // add error messages of closest matching subSchema as - // that's probably the one the user was trying to match - result.mergeErrors(bestValidationResult) - } - } - - } - - if len(currentSubSchema.allOf) > 0 { - nbValidated := 0 - - for _, allOfSchema := range currentSubSchema.allOf { - validationResult := allOfSchema.subValidateWithContext(currentNode, context) - if validationResult.Valid() { - nbValidated++ - } - result.mergeErrors(validationResult) - } - - if nbValidated != len(currentSubSchema.allOf) { - result.addError(new(NumberAllOfError), context, currentNode, ErrorDetails{}) - } - } - - if currentSubSchema.not != nil { - validationResult := currentSubSchema.not.subValidateWithContext(currentNode, context) - if validationResult.Valid() { - result.addError(new(NumberNotError), context, currentNode, ErrorDetails{}) - } - } - - if currentSubSchema.dependencies != nil && len(currentSubSchema.dependencies) > 0 { - if isKind(currentNode, reflect.Map) { - for elementKey := range currentNode.(map[string]interface{}) { - if dependency, ok := currentSubSchema.dependencies[elementKey]; ok { - switch dependency := dependency.(type) { - - case []string: - for _, dependOnKey := range dependency { - if _, dependencyResolved := currentNode.(map[string]interface{})[dependOnKey]; !dependencyResolved { - result.addError( - new(MissingDependencyError), - context, - currentNode, - ErrorDetails{"dependency": dependOnKey}, - ) - } - } - - case *subSchema: - dependency.validateRecursive(dependency, currentNode, result, context) - - } - } - } - } - } - - result.incrementScore() -} - -func (v *subSchema) validateCommon(currentSubSchema *subSchema, value interface{}, result *Result, context *jsonContext) { - - if internalLogEnabled { - internalLog("validateCommon %s", context.String()) - internalLog(" %v", value) - } - - // enum: - if len(currentSubSchema.enum) > 0 { - has, err := currentSubSchema.ContainsEnum(value) - 
if err != nil { - result.addError(new(InternalError), context, value, ErrorDetails{"error": err}) - } - if !has { - result.addError( - new(EnumError), - context, - value, - ErrorDetails{ - "allowed": strings.Join(currentSubSchema.enum, ", "), - }, - ) - } - } - - result.incrementScore() -} - -func (v *subSchema) validateArray(currentSubSchema *subSchema, value []interface{}, result *Result, context *jsonContext) { - - if internalLogEnabled { - internalLog("validateArray %s", context.String()) - internalLog(" %v", value) - } - - nbValues := len(value) - - // TODO explain - if currentSubSchema.itemsChildrenIsSingleSchema { - for i := range value { - subContext := newJsonContext(strconv.Itoa(i), context) - validationResult := currentSubSchema.itemsChildren[0].subValidateWithContext(value[i], subContext) - result.mergeErrors(validationResult) - } - } else { - if currentSubSchema.itemsChildren != nil && len(currentSubSchema.itemsChildren) > 0 { - - nbItems := len(currentSubSchema.itemsChildren) - - // while we have both schemas and values, check them against each other - for i := 0; i != nbItems && i != nbValues; i++ { - subContext := newJsonContext(strconv.Itoa(i), context) - validationResult := currentSubSchema.itemsChildren[i].subValidateWithContext(value[i], subContext) - result.mergeErrors(validationResult) - } - - if nbItems < nbValues { - // we have less schemas than elements in the instance array, - // but that might be ok if "additionalItems" is specified. - - switch currentSubSchema.additionalItems.(type) { - case bool: - if !currentSubSchema.additionalItems.(bool) { - result.addError(new(ArrayNoAdditionalItemsError), context, value, ErrorDetails{}) - } - case *subSchema: - additionalItemSchema := currentSubSchema.additionalItems.(*subSchema) - for i := nbItems; i != nbValues; i++ { - subContext := newJsonContext(strconv.Itoa(i), context) - validationResult := additionalItemSchema.subValidateWithContext(value[i], subContext) - result.mergeErrors(validationResult) - } - } - } - } - } - - // minItems & maxItems - if currentSubSchema.minItems != nil { - if nbValues < int(*currentSubSchema.minItems) { - result.addError( - new(ArrayMinItemsError), - context, - value, - ErrorDetails{"min": *currentSubSchema.minItems}, - ) - } - } - if currentSubSchema.maxItems != nil { - if nbValues > int(*currentSubSchema.maxItems) { - result.addError( - new(ArrayMaxItemsError), - context, - value, - ErrorDetails{"max": *currentSubSchema.maxItems}, - ) - } - } - - // uniqueItems: - if currentSubSchema.uniqueItems { - var stringifiedItems []string - for _, v := range value { - vString, err := marshalToJsonString(v) - if err != nil { - result.addError(new(InternalError), context, value, ErrorDetails{"err": err}) - } - if isStringInSlice(stringifiedItems, *vString) { - result.addError( - new(ItemsMustBeUniqueError), - context, - value, - ErrorDetails{"type": TYPE_ARRAY}, - ) - } - stringifiedItems = append(stringifiedItems, *vString) - } - } - - result.incrementScore() -} - -func (v *subSchema) validateObject(currentSubSchema *subSchema, value map[string]interface{}, result *Result, context *jsonContext) { - - if internalLogEnabled { - internalLog("validateObject %s", context.String()) - internalLog(" %v", value) - } - - // minProperties & maxProperties: - if currentSubSchema.minProperties != nil { - if len(value) < int(*currentSubSchema.minProperties) { - result.addError( - new(ArrayMinPropertiesError), - context, - value, - ErrorDetails{"min": *currentSubSchema.minProperties}, - ) - } - } - if 
currentSubSchema.maxProperties != nil { - if len(value) > int(*currentSubSchema.maxProperties) { - result.addError( - new(ArrayMaxPropertiesError), - context, - value, - ErrorDetails{"max": *currentSubSchema.maxProperties}, - ) - } - } - - // required: - for _, requiredProperty := range currentSubSchema.required { - _, ok := value[requiredProperty] - if ok { - result.incrementScore() - } else { - result.addError( - new(RequiredError), - context, - value, - ErrorDetails{"property": requiredProperty}, - ) - } - } - - // additionalProperty & patternProperty: - if currentSubSchema.additionalProperties != nil { - - switch currentSubSchema.additionalProperties.(type) { - case bool: - - if !currentSubSchema.additionalProperties.(bool) { - - for pk := range value { - - found := false - for _, spValue := range currentSubSchema.propertiesChildren { - if pk == spValue.property { - found = true - } - } - - pp_has, pp_match := v.validatePatternProperty(currentSubSchema, pk, value[pk], result, context) - - if found { - - if pp_has && !pp_match { - result.addError( - new(AdditionalPropertyNotAllowedError), - context, - value, - ErrorDetails{"property": pk}, - ) - } - - } else { - - if !pp_has || !pp_match { - result.addError( - new(AdditionalPropertyNotAllowedError), - context, - value, - ErrorDetails{"property": pk}, - ) - } - - } - } - } - - case *subSchema: - - additionalPropertiesSchema := currentSubSchema.additionalProperties.(*subSchema) - for pk := range value { - - found := false - for _, spValue := range currentSubSchema.propertiesChildren { - if pk == spValue.property { - found = true - } - } - - pp_has, pp_match := v.validatePatternProperty(currentSubSchema, pk, value[pk], result, context) - - if found { - - if pp_has && !pp_match { - validationResult := additionalPropertiesSchema.subValidateWithContext(value[pk], context) - result.mergeErrors(validationResult) - } - - } else { - - if !pp_has || !pp_match { - validationResult := additionalPropertiesSchema.subValidateWithContext(value[pk], context) - result.mergeErrors(validationResult) - } - - } - - } - } - } else { - - for pk := range value { - - pp_has, pp_match := v.validatePatternProperty(currentSubSchema, pk, value[pk], result, context) - - if pp_has && !pp_match { - - result.addError( - new(InvalidPropertyPatternError), - context, - value, - ErrorDetails{ - "property": pk, - "pattern": currentSubSchema.PatternPropertiesString(), - }, - ) - } - - } - } - - result.incrementScore() -} - -func (v *subSchema) validatePatternProperty(currentSubSchema *subSchema, key string, value interface{}, result *Result, context *jsonContext) (has bool, matched bool) { - - if internalLogEnabled { - internalLog("validatePatternProperty %s", context.String()) - internalLog(" %s %v", key, value) - } - - has = false - - validatedkey := false - - for pk, pv := range currentSubSchema.patternProperties { - if matches, _ := regexp.MatchString(pk, key); matches { - has = true - subContext := newJsonContext(key, context) - validationResult := pv.subValidateWithContext(value, subContext) - result.mergeErrors(validationResult) - if validationResult.Valid() { - validatedkey = true - } - } - } - - if !validatedkey { - return has, false - } - - result.incrementScore() - - return has, true -} - -func (v *subSchema) validateString(currentSubSchema *subSchema, value interface{}, result *Result, context *jsonContext) { - - // Ignore JSON numbers - if isJsonNumber(value) { - return - } - - // Ignore non strings - if !isKind(value, reflect.String) { - return - } - - if 
internalLogEnabled { - internalLog("validateString %s", context.String()) - internalLog(" %v", value) - } - - stringValue := value.(string) - - // minLength & maxLength: - if currentSubSchema.minLength != nil { - if utf8.RuneCount([]byte(stringValue)) < int(*currentSubSchema.minLength) { - result.addError( - new(StringLengthGTEError), - context, - value, - ErrorDetails{"min": *currentSubSchema.minLength}, - ) - } - } - if currentSubSchema.maxLength != nil { - if utf8.RuneCount([]byte(stringValue)) > int(*currentSubSchema.maxLength) { - result.addError( - new(StringLengthLTEError), - context, - value, - ErrorDetails{"max": *currentSubSchema.maxLength}, - ) - } - } - - if currentSubSchema.minNumeric != nil { - re := regexp.MustCompile(`\pN`) - - if len(re.FindAllString(stringValue, -1)) < int(*currentSubSchema.minNumeric) { - result.addError( - new(StringNumericGTEError), - context, - value, - ErrorDetails{"min_numeric": *currentSubSchema.minNumeric}, - ) - } - } - - if currentSubSchema.minSpecial != nil { - re := regexp.MustCompile(`[^\pN\pL]`) - - if len(re.FindAllString(stringValue, -1)) < int(*currentSubSchema.minSpecial) { - result.addError( - new(StringSpecialGTEError), - context, - value, - ErrorDetails{"min_special": *currentSubSchema.minSpecial}, - ) - } - } - - if currentSubSchema.multiCase { - // See http://www.regular-expressions.info/unicode.html on unicode regexp docs - reL := regexp.MustCompile(`[\p{Ll}\pN]`) - reU := regexp.MustCompile(`\p{Lu}`) - - if len(reL.FindAllString(stringValue, -1)) == 0 || len(reU.FindAllString(stringValue, -1)) == 0 { - result.addError( - new(StringMultiCaseError), - context, - value, - ErrorDetails{"multi_case": currentSubSchema.multiCase}, - ) - } - } - - if currentSubSchema.disableSequential { - re := regexp.MustCompile("(?i)(abc|bcd|cde|def|efg|fgh|ghi|hij|ijk|jkl|klm|lmn|mno|nop|opq|pqr|qrs|rst|stu|tuv|uvw|vwx|wxy|xyz|012|123|234|345|456|567|678|789)") - - var seq []string - - for i := range stringValue { - if i < 2 { - continue - } - - if stringValue[i-2] == stringValue[i-1] && stringValue[i-1] == stringValue[i] { - seq = append(seq, stringValue[i-2:i]) - } - } - - m := re.FindAllString(stringValue, -1) - allM := append(seq, m...) 
- - if len(allM) > 0 { - result.addError( - new(StringSequentialError), - context, - value, - ErrorDetails{"sequential_chars": strings.Join(allM, ", ")}, - ) - } - } - - // pattern: - if currentSubSchema.pattern != nil { - if !currentSubSchema.pattern.MatchString(stringValue) { - result.addError( - new(DoesNotMatchPatternError), - context, - value, - ErrorDetails{"pattern": currentSubSchema.pattern}, - ) - - } - } - - // format - if currentSubSchema.format != "" { - if !FormatCheckers.IsFormat(currentSubSchema.format, stringValue) { - result.addError( - new(DoesNotMatchFormatError), - context, - value, - ErrorDetails{"format": currentSubSchema.format}, - ) - } - } - - result.incrementScore() -} - -func (v *subSchema) validateNumber(currentSubSchema *subSchema, value interface{}, result *Result, context *jsonContext) { - - // Ignore non numbers - if !isJsonNumber(value) { - return - } - - if internalLogEnabled { - internalLog("validateNumber %s", context.String()) - internalLog(" %v", value) - } - - number := value.(json.Number) - float64Value, _ := number.Float64() - - // multipleOf: - if currentSubSchema.multipleOf != nil { - - if !isFloat64AnInteger(float64Value / *currentSubSchema.multipleOf) { - result.addError( - new(MultipleOfError), - context, - resultErrorFormatJsonNumber(number), - ErrorDetails{"multiple": *currentSubSchema.multipleOf}, - ) - } - } - - //maximum & exclusiveMaximum: - if currentSubSchema.maximum != nil { - if currentSubSchema.exclusiveMaximum { - if float64Value >= *currentSubSchema.maximum { - result.addError( - new(NumberLTError), - context, - resultErrorFormatJsonNumber(number), - ErrorDetails{ - "max": resultErrorFormatNumber(*currentSubSchema.maximum), - }, - ) - } - } else { - if float64Value > *currentSubSchema.maximum { - result.addError( - new(NumberLTEError), - context, - resultErrorFormatJsonNumber(number), - ErrorDetails{ - "max": resultErrorFormatNumber(*currentSubSchema.maximum), - }, - ) - } - } - } - - //minimum & exclusiveMinimum: - if currentSubSchema.minimum != nil { - if currentSubSchema.exclusiveMinimum { - if float64Value <= *currentSubSchema.minimum { - result.addError( - new(NumberGTError), - context, - resultErrorFormatJsonNumber(number), - ErrorDetails{ - "min": resultErrorFormatNumber(*currentSubSchema.minimum), - }, - ) - } - } else { - if float64Value < *currentSubSchema.minimum { - result.addError( - new(NumberGTEError), - context, - resultErrorFormatJsonNumber(number), - ErrorDetails{ - "min": resultErrorFormatNumber(*currentSubSchema.minimum), - }, - ) - } - } - } - - result.incrementScore() -} diff --git a/vendor/github.com/TykTechnologies/gorpc/.gitignore b/vendor/github.com/TykTechnologies/gorpc/.gitignore deleted file mode 100644 index c93cf804d2e..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -tags -*.pprof -*.test diff --git a/vendor/github.com/TykTechnologies/gorpc/LICENSE b/vendor/github.com/TykTechnologies/gorpc/LICENSE deleted file mode 100644 index a43e7ad1c92..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Aliaksandr Valialkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit 
persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/vendor/github.com/TykTechnologies/gorpc/Makefile b/vendor/github.com/TykTechnologies/gorpc/Makefile deleted file mode 100644 index a59dca751c5..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -test: - GOMAXPROCS=1 go test - GOMAXPROCS=2 go test - GOMAXPROCS=4 go test - GOMAXPROCS=8 go test - -test-386: - GOARCH=386 GOMAXPROCS=1 go test - GOARCH=386 GOMAXPROCS=2 go test - GOARCH=386 GOMAXPROCS=4 go test - GOARCH=386 GOMAXPROCS=8 go test - -bench-1-goprocs: - GOMAXPROCS=1 go test -test.bench=".*" - -bench-2-goprocs: - GOMAXPROCS=2 go test -test.bench=".*" - -bench-4-goprocs: - GOMAXPROCS=4 go test -test.bench=".*" - -bench-8-goprocs: - GOMAXPROCS=8 go test -test.bench=".*" - diff --git a/vendor/github.com/TykTechnologies/gorpc/README.md b/vendor/github.com/TykTechnologies/gorpc/README.md deleted file mode 100644 index 0b3d7e5f68b..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/README.md +++ /dev/null @@ -1,121 +0,0 @@ -gorpc -===== - -Simple, fast and scalable golang RPC library for high load. - - -Gorpc provides the following features useful for highly loaded projects -with RPC: - -* It minimizes the number of connect() syscalls by pipelining request - and response messages over a single connection. - -* It minimizes the number of send() syscalls by packing as much - as possible pending requests and responses into a single compressed buffer - before passing it into send() syscall. - -* It minimizes the number of recv() syscalls by reading and buffering as much - as possible data from the network. - -* It supports RPC batching, which allows preparing multiple requests and sending - them to the server in a single batch. - -These features help the OS minimizing overhead (CPU load, the number of -TCP connections in TIME_WAIT and CLOSE_WAIT states, the number of network -packets and the amount of network bandwidth) required for RPC processing under -high load. - - -Gorpc additionally provides the following features missing -in [net/rpc](http://golang.org/pkg/net/rpc/): - -* Client automatically manages connections and automatically reconnects - to the server on connection errors. -* Client supports response timeouts out of the box. -* Client supports RPC batching out of the box. -* Client detects stuck servers and immediately returns error to the caller. -* Client supports fast message passing to the Server, i.e. requests - without responses. -* Both Client and Server provide network stats and RPC stats out of the box. -* Commonly used RPC transports such as TCP, TLS and unix socket are available - out of the box. -* RPC transport compression is provided out of the box. -* Server provides graceful shutdown out of the box. -* Server supports RPC handlers' councurrency throttling out of the box. -* Server may pass client address to RPC handlers. 
-* Server gracefully handles panic in RPC handlers. -* Dispatcher accepts functions as RPC handlers. -* Dispatcher supports registering multiple receiver objects of the same type - under distinct names. -* Dispatcher supports RPC handlers with zero, one (request) or two (client - address and request) arguments and zero, one (either response or error) - or two (response, error) return values. - - -Dispatcher API provided by gorpc allows easily converting usual functions -and/or struct methods into RPC versions on both client and server sides. -See [Dispatcher examples](http://godoc.org/github.com/valyala/gorpc#Dispatcher) -for more details. - - -By default TCP connections are used as underlying gorpc transport. -But it is possible using arbitrary underlying transport - just provide custom -implementations for Client.Dial and Server.Listener. -RPC authentication, authorization and encryption can be easily implemented -via custom underlying transport and/or via OnConnect callbacks. -Currently gorpc provides TCP, TLS and unix socket transport out of the box. - - -Currently gorpc with default settings is successfully used in highly loaded -production environment serving up to 40K qps. Switching from http-based rpc -to gorpc reduced required network bandwidth from 300 Mbit/s to 24 Mbit/s. - - -Docs -==== - -See http://godoc.org/github.com/valyala/gorpc . - - -Usage -===== - -Server: -```go -s := &gorpc.Server{ - // Accept clients on this TCP address. - Addr: ":12345", - - // Echo handler - just return back the message we received from the client - Handler: func(clientAddr string, request interface{}) interface{} { - log.Printf("Obtained request %+v from the client %s\n", request, clientAddr) - return request - }, -} -if err := s.Serve(); err != nil { - log.Fatalf("Cannot start rpc server: %s", err) -} -``` - -Client: -```go -c := &gorpc.Client{ - // TCP address of the server. - Addr: "rpc.server.addr:12345", -} -c.Start() - -resp, err := c.Call("foobar") -if err != nil { - log.Fatalf("Error when sending request to server: %s", err) -} -if resp.(string) != "foobar" { - log.Fatalf("Unexpected response from the server: %+v", resp) -} -``` - -Both client and server collect connection stats - the number of bytes -read / written and the number of calls / errors to send(), recv(), connect() -and accept(). This stats is available at Client.Stats and Server.Stats. - -See tests for more usage examples. diff --git a/vendor/github.com/TykTechnologies/gorpc/TODO b/vendor/github.com/TykTechnologies/gorpc/TODO deleted file mode 100644 index 31eac72237e..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/TODO +++ /dev/null @@ -1,3 +0,0 @@ -- Add support for channel request and response. -- Add support for io.Writer, io.Reader and io.ReadWriter request and response. -- Add HTTP transport via HTTP connection hijacking similar to net/rpc. diff --git a/vendor/github.com/TykTechnologies/gorpc/client.go b/vendor/github.com/TykTechnologies/gorpc/client.go deleted file mode 100644 index cba9e4c7bd8..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/client.go +++ /dev/null @@ -1,718 +0,0 @@ -package gorpc - -import ( - "fmt" - "io" - "net" - "sync" - "time" -) - -// Client implements RPC client. -// -// The client must be started with Client.Start() before use. -// -// It is absolutely safe and encouraged using a single client across arbitrary -// number of concurrently running goroutines. -// -// Default client settings are optimized for high load, so don't override -// them without valid reason. 
-type Client struct { - // Server address to connect to. - // - // The address format depends on the underlying transport provided - // by Client.Dial. The following transports are provided out of the box: - // * TCP - see NewTCPClient() and NewTCPServer(). - // * TLS - see NewTLSClient() and NewTLSServer(). - // * Unix sockets - see NewUnixClient() and NewUnixServer(). - // - // By default TCP transport is used. - Addr string - - // The number of concurrent connections the client should establish - // to the sever. - // By default only one connection is established. - Conns int - - // The maximum number of pending requests in the queue. - // - // The number of pending requsts should exceed the expected number - // of concurrent goroutines calling client's methods. - // Otherwise a lot of ClientError.Overflow errors may appear. - // - // Default is DefaultPendingMessages. - PendingRequests int - - // Delay between request flushes. - // - // Negative values lead to immediate requests' sending to the server - // without their buffering. This minimizes rpc latency at the cost - // of higher CPU and network usage. - // - // Default value is DefaultFlushDelay. - FlushDelay time.Duration - - // Maximum request time. - // Default value is DefaultRequestTimeout. - RequestTimeout time.Duration - - // Disable data compression. - // By default data compression is enabled. - DisableCompression bool - - // Size of send buffer per each underlying connection in bytes. - // Default value is DefaultBufferSize. - SendBufferSize int - - // Size of recv buffer per each underlying connection in bytes. - // Default value is DefaultBufferSize. - RecvBufferSize int - - // OnConnect is called whenever connection to server is established. - // The callback can be used for authentication/authorization/encryption - // and/or for custom transport wrapping. - // - // See also Dial callback, which can be used for sophisticated - // transport implementation. - OnConnect OnConnectFunc - - // The client calls this callback when it needs new connection - // to the server. - // The client passes Client.Addr into Dial(). - // - // Override this callback if you want custom underlying transport - // and/or authentication/authorization. - // Don't forget overriding Server.Listener accordingly. - // - // See also OnConnect for authentication/authorization purposes. - // - // * NewTLSClient() and NewTLSServer() can be used for encrypted rpc. - // * NewUnixClient() and NewUnixServer() can be used for fast local - // inter-process rpc. - // - // By default it returns TCP connections established to the Client.Addr. - Dial DialFunc - - // LogError is used for error logging. - // - // By default the function set via SetErrorLogger() is used. - LogError LoggerFunc - - // Connection statistics. - // - // The stats doesn't reset automatically. Feel free resetting it - // any time you wish. - Stats ConnStats - - requestsChan chan *AsyncResult - - clientStopChan chan struct{} - stopWg sync.WaitGroup -} - -// Start starts rpc client. Establishes connection to the server on Client.Addr. -// -// All the response types the server may return must be registered -// via gorpc.RegisterType() before starting the client. -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. -func (c *Client) Start() { - if c.LogError == nil { - c.LogError = errorLogger - } - if c.clientStopChan != nil { - panic("gorpc.Client: the given client is already started. 
Call Client.Stop() before calling Client.Start() again!") - } - - if c.PendingRequests <= 0 { - c.PendingRequests = DefaultPendingMessages - } - if c.FlushDelay == 0 { - c.FlushDelay = DefaultFlushDelay - } - if c.RequestTimeout <= 0 { - c.RequestTimeout = DefaultRequestTimeout - } - if c.SendBufferSize <= 0 { - c.SendBufferSize = DefaultBufferSize - } - if c.RecvBufferSize <= 0 { - c.RecvBufferSize = DefaultBufferSize - } - - c.requestsChan = make(chan *AsyncResult, c.PendingRequests) - c.clientStopChan = make(chan struct{}) - - if c.Conns <= 0 { - c.Conns = 1 - } - if c.Dial == nil { - c.Dial = defaultDial - } - - for i := 0; i < c.Conns; i++ { - c.stopWg.Add(1) - go clientHandler(c) - } -} - -// Stop stops rpc client. Stopped client can be started again. -func (c *Client) Stop() { - if c.clientStopChan == nil { - panic("gorpc.Client: the client must be started before stopping it") - } - close(c.clientStopChan) - c.stopWg.Wait() - c.clientStopChan = nil -} - -// Call sends the given request to the server and obtains response -// from the server. -// Returns non-nil error if the response cannot be obtained during -// Client.RequestTimeout or server connection problems occur. -// The returned error can be casted to ClientError. -// -// Request and response types may be arbitrary. All the response types -// the server may return must be registered via gorpc.RegisterType() before -// starting the client. -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. -// -// Hint: use Dispatcher for distinct calls' construction. -// -// Don't forget starting the client with Client.Start() before calling Client.Call(). -func (c *Client) Call(request interface{}) (response interface{}, err error) { - return c.CallTimeout(request, c.RequestTimeout) -} - -// CallTimeout sends the given request to the server and obtains response -// from the server. -// Returns non-nil error if the response cannot be obtained during -// the given timeout or server connection problems occur. -// The returned error can be casted to ClientError. -// -// Request and response types may be arbitrary. All the response types -// the server may return must be registered via gorpc.RegisterType() before -// starting the client. -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. -// -// Hint: use Dispatcher for distinct calls' construction. -// -// Don't forget starting the client with Client.Start() before calling Client.Call(). -func (c *Client) CallTimeout(request interface{}, timeout time.Duration) (response interface{}, err error) { - var m *AsyncResult - if m, err = c.CallAsync(request); err != nil { - return nil, err - } - - t := acquireTimer(timeout) - - select { - case <-m.Done: - response, err = m.Response, m.Error - case <-t.C: - err = getClientTimeoutError(c, timeout) - } - - releaseTimer(t) - return -} - -func getClientTimeoutError(c *Client, timeout time.Duration) error { - err := fmt.Errorf("gorpc.Client: [%s]. Cannot obtain response during timeout=%s", c.Addr, timeout) - c.LogError("%s", err) - return &ClientError{ - Timeout: true, - err: err, - } -} - -// Send sends the given request to the server and doesn't wait for response. -// -// Since this is 'fire and forget' function, which never waits for response, -// it cannot guarantee that the server receives and successfully processes -// the given request. 
Though in most cases under normal conditions requests -// should reach the server and it should successfully process them. -// Send semantics is similar to UDP messages' semantics. -// -// The server may return arbitrary response on Send() request, but the response -// is totally ignored. -// -// Don't forget starting the client with Client.Start() before calling Client.Send(). -func (c *Client) Send(request interface{}) error { - _, err := c.callAsync(request, true) - return err -} - -// AsyncResult is a result returned from Client.CallAsync(). -type AsyncResult struct { - // The response can be read only after <-Done unblocks. - Response interface{} - - // The error can be read only after <-Done unblocks. - // The error can be casted to ClientError. - Error error - - // Response and Error become available after <-Done unblocks. - Done <-chan struct{} - - request interface{} - t time.Time - done chan struct{} -} - -// CallAsync starts async rpc call. -// -// Rpc call is complete after <-AsyncResult.Done unblocks. -// If you want canceling the request, just throw away the returned AsyncResult. -// -// CallAsync doesn't respect Client.RequestTimeout - response timeout -// may be controlled by the caller via something like: -// -// r := c.CallAsync("foobar") -// select { -// case <-time.After(c.RequestTimeout): -// log.Printf("rpc timeout!") -// case <-r.Done: -// processResponse(r.Response, r.Error) -// } -// -// Don't forget starting the client with Client.Start() before -// calling Client.CallAsync(). -func (c *Client) CallAsync(request interface{}) (*AsyncResult, error) { - return c.callAsync(request, false) -} - -func (c *Client) callAsync(request interface{}, skipResponse bool) (ar *AsyncResult, err error) { - m := &AsyncResult{ - request: request, - } - if !skipResponse { - m.t = time.Now() - m.done = make(chan struct{}) - m.Done = m.done - } - - select { - case c.requestsChan <- m: - return m, nil - default: - err = fmt.Errorf("gorpc.Client: [%s]. Requests' queue with size=%d is overflown. Try increasing Client.PendingRequests value", c.Addr, cap(c.requestsChan)) - c.LogError("%s", err) - err = &ClientError{ - Overflow: true, - err: err, - } - return nil, err - } -} - -// Batch allows grouping and executing multiple RPCs in a single batch. -// -// Batch may be created via Client.NewBatch(). -type Batch struct { - c *Client - ops []*BatchResult - opsLock sync.Mutex -} - -// BatchResult is a result returned from Batch.Add*(). -type BatchResult struct { - // The response can be read only after Batch.Call*() returns. - Response interface{} - - // The error can be read only after Batch.Call*() returns. - // The error can be casted to ClientError. - Error error - - // <-Done unblocks after Batch.Call*() returns. - // Response and Error become available after <-Done unblocks. - Done <-chan struct{} - - request interface{} - ctx interface{} - done chan struct{} -} - -// NewBatch creates new RPC batch. -// -// It is safe creating multiple concurrent batches from a single client. -// -// Don't forget starting the client with Client.Start() before working -// with batched RPC. -func (c *Client) NewBatch() *Batch { - return &Batch{ - c: c, - } -} - -// Add ads new request to the RPC batch. -// -// The order of batched RPCs execution on the server is unspecified. -// -// All the requests added to the batch are sent to the server at once -// when Batch.Call*() is called. -// -// It is safe adding multiple requests to the same batch from concurrently -// running goroutines. 
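(A usage sketch for the batching API documented in the comments above; the server address, the request payloads and the variable names are illustrative, and error handling via log.Fatalf is only for brevity.)

```go
c := &gorpc.Client{Addr: "rpc.server.addr:12345"}
c.Start()
defer c.Stop()

b := c.NewBatch()
r1 := b.Add("first request")
r2 := b.Add("second request")

// Fire-and-forget: no result is tracked for this request.
b.AddSkipResponse("third request")

// Call sends every batched request to the server at once and waits
// for the tracked responses (or for Client.RequestTimeout).
if err := b.Call(); err != nil {
	log.Fatalf("batch failed: %s", err)
}

for i, r := range []*gorpc.BatchResult{r1, r2} {
	if r.Error != nil {
		log.Printf("request %d failed: %s", i, r.Error)
		continue
	}
	log.Printf("request %d => %+v", i, r.Response)
}
```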
-func (b *Batch) Add(request interface{}) *BatchResult { - return b.add(request, false) -} - -// AddSkipResponse adds new request to the RPC batch and doesn't care -// about the response. -// -// The order of batched RPCs execution on the server is unspecified. -// -// All the requests added to the batch are sent to the server at once -// when Batch.Call*() is called. -// -// It is safe adding multiple requests to the same batch from concurrently -// running goroutines. -func (b *Batch) AddSkipResponse(request interface{}) { - b.add(request, true) -} - -func (b *Batch) add(request interface{}, skipResponse bool) *BatchResult { - br := &BatchResult{ - request: request, - } - if !skipResponse { - br.done = make(chan struct{}) - br.Done = br.done - } - - b.opsLock.Lock() - b.ops = append(b.ops, br) - b.opsLock.Unlock() - - return br -} - -// Call calls all the RPCs added via Batch.Add(). -// -// The order of batched RPCs execution on the server is unspecified. -// -// The caller may read all BatchResult contents returned from Batch.Add() -// after the Call returns. -// -// It is guaranteed that all <-BatchResult.Done channels are unblocked after -// the Call returns. -func (b *Batch) Call() error { - return b.CallTimeout(b.c.RequestTimeout) -} - -// CallTimeout calls all the RPCs added via Batch.Add() and waits for -// all the RPC responses during the given timeout. -// -// The caller may read all BatchResult contents returned from Batch.Add() -// after the CallTimeout returns. -// -// It is guaranteed that all <-BatchResult.Done channels are unblocked after -// the CallTimeout returns. -func (b *Batch) CallTimeout(timeout time.Duration) error { - b.opsLock.Lock() - ops := b.ops - b.ops = nil - b.opsLock.Unlock() - - results := make([]*AsyncResult, len(ops)) - for i := range ops { - op := ops[i] - r, err := callAsyncRetry(b.c, op.request, op.done == nil, 5) - if err != nil { - return err - } - results[i] = r - } - - t := acquireTimer(timeout) - - for i := range results { - r := results[i] - op := ops[i] - if op.done == nil { - continue - } - - select { - case <-r.Done: - op.Response, op.Error = r.Response, r.Error - close(op.done) - case <-t.C: - releaseTimer(t) - err := getClientTimeoutError(b.c, timeout) - for ; i < len(results); i++ { - op = ops[i] - op.Error = err - if op.done != nil { - close(op.done) - } - } - return err - } - } - - releaseTimer(t) - - return nil -} - -func callAsyncRetry(c *Client, request interface{}, skipResponse bool, retriesCount int) (*AsyncResult, error) { - retriesCount++ - for { - ar, err := c.callAsync(request, skipResponse) - if err == nil { - return ar, nil - } - if !err.(*ClientError).Overflow { - return nil, err - } - retriesCount-- - if retriesCount <= 0 { - return nil, err - } - time.Sleep(10 * time.Millisecond) - } -} - -// ClientError is an error Client methods can return. -type ClientError struct { - // Set if the error is timeout-related. - Timeout bool - - // Set if the error is connection-related. - Connection bool - - // Set if the error is server-related. - Server bool - - // Set if the error is related to internal resources' overflow. - // Increase PendingRequests if you see a lot of such errors. - Overflow bool - - err error -} - -func (e *ClientError) Error() string { - return e.err.Error() -} - -func clientHandler(c *Client) { - defer c.stopWg.Done() - - var conn net.Conn - var err error - - for { - dialChan := make(chan struct{}) - go func() { - if conn, err = c.Dial(c.Addr); err != nil { - c.LogError("gorpc.Client: [%s]. 
Cannot establish rpc connection: [%s]", c.Addr, err) - time.Sleep(time.Second) - } - close(dialChan) - }() - - select { - case <-c.clientStopChan: - return - case <-dialChan: - c.Stats.incDialCalls() - } - - if err != nil { - c.Stats.incDialErrors() - continue - } - clientHandleConnection(c, conn) - } -} - -func clientHandleConnection(c *Client, conn net.Conn) { - if c.OnConnect != nil { - newConn, _, err := c.OnConnect(conn) - if err != nil { - c.LogError("gorpc.Client: [%s]. OnConnect error: [%s]", c.Addr, err) - conn.Close() - return - } - conn = newConn - } - - var buf [1]byte - if !c.DisableCompression { - buf[0] = 1 - } - _, err := conn.Write(buf[:]) - if err != nil { - c.LogError("gorpc.Client: [%s]. Error when writing handshake to server: [%s]", c.Addr, err) - conn.Close() - return - } - - stopChan := make(chan struct{}) - - pendingRequests := make(map[uint64]*AsyncResult) - var pendingRequestsLock sync.Mutex - - writerDone := make(chan error, 1) - go clientWriter(c, conn, pendingRequests, &pendingRequestsLock, stopChan, writerDone) - - readerDone := make(chan error, 1) - go clientReader(c, conn, pendingRequests, &pendingRequestsLock, readerDone) - - select { - case err = <-writerDone: - close(stopChan) - conn.Close() - <-readerDone - case err = <-readerDone: - close(stopChan) - conn.Close() - <-writerDone - case <-c.clientStopChan: - close(stopChan) - conn.Close() - <-readerDone - <-writerDone - } - - if err != nil { - c.LogError("%s", err) - err = &ClientError{ - Connection: true, - err: err, - } - } - for _, m := range pendingRequests { - m.Error = err - if m.done != nil { - close(m.done) - } - } -} - -func clientWriter(c *Client, w io.Writer, pendingRequests map[uint64]*AsyncResult, pendingRequestsLock *sync.Mutex, stopChan <-chan struct{}, done chan<- error) { - var err error - defer func() { done <- err }() - - e := newMessageEncoder(w, c.SendBufferSize, !c.DisableCompression, &c.Stats) - defer e.Close() - - t := time.NewTimer(c.FlushDelay) - var flushChan <-chan time.Time - var wr wireRequest - var msgID uint64 - for { - var m *AsyncResult - - select { - case m = <-c.requestsChan: - default: - select { - case <-stopChan: - return - case m = <-c.requestsChan: - case <-flushChan: - if err = e.Flush(); err != nil { - err = fmt.Errorf("gorpc.Client: [%s]. Cannot flush requests to underlying stream: [%s]", c.Addr, err) - return - } - flushChan = nil - continue - } - } - - if flushChan == nil { - flushChan = getFlushChan(t, c.FlushDelay) - } - - if m.done == nil { - wr.ID = 0 - } else { - msgID++ - if msgID == 0 { - msgID = 1 - } - pendingRequestsLock.Lock() - n := len(pendingRequests) - for { - if _, ok := pendingRequests[msgID]; !ok { - break - } - msgID++ - } - pendingRequests[msgID] = m - pendingRequestsLock.Unlock() - - if n > 10*c.PendingRequests { - err = fmt.Errorf("gorpc.Client: [%s]. The server didn't return %d responses yet. Closing server connection in order to prevent client resource leaks", c.Addr, n) - return - } - - wr.ID = msgID - } - - wr.Request = m.request - m.request = nil - if err = e.Encode(wr); err != nil { - err = fmt.Errorf("gorpc.Client: [%s]. Cannot send request to wire: [%s]", c.Addr, err) - return - } - wr.Request = nil - } -} - -func clientReader(c *Client, r io.Reader, pendingRequests map[uint64]*AsyncResult, pendingRequestsLock *sync.Mutex, done chan<- error) { - var err error - defer func() { - if r := recover(); r != nil { - if err == nil { - err = fmt.Errorf("gorpc.Client: [%s]. 
Panic when reading data from server: %v", c.Addr, r) - } - } - done <- err - }() - - d := newMessageDecoder(r, c.RecvBufferSize, !c.DisableCompression, &c.Stats) - defer d.Close() - - var wr wireResponse - for { - if err = d.Decode(&wr); err != nil { - err = fmt.Errorf("gorpc.Client: [%s]. Cannot decode response: [%s]", c.Addr, err) - return - } - - pendingRequestsLock.Lock() - m, ok := pendingRequests[wr.ID] - if ok { - delete(pendingRequests, wr.ID) - } - pendingRequestsLock.Unlock() - - if !ok { - err = fmt.Errorf("gorpc.Client: [%s]. Unexpected msgID=[%d] obtained from server", c.Addr, wr.ID) - return - } - - m.Response = wr.Response - - wr.ID = 0 - wr.Response = nil - if wr.Error != "" { - m.Error = &ClientError{ - Server: true, - err: fmt.Errorf("gorpc.Client: [%s]. Server error: [%s]", c.Addr, wr.Error), - } - wr.Error = "" - } - - close(m.done) - - c.Stats.incRPCCalls() - c.Stats.incRPCTime(uint64(time.Since(m.t).Seconds() * 1000)) - } -} diff --git a/vendor/github.com/TykTechnologies/gorpc/common.go b/vendor/github.com/TykTechnologies/gorpc/common.go deleted file mode 100644 index bb0823b036d..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/common.go +++ /dev/null @@ -1,118 +0,0 @@ -package gorpc - -import ( - "fmt" - "log" - "net" - "sync" - "time" -) - -const ( - // DefaultConcurrency is the default number of concurrent rpc calls - // the server can process. - DefaultConcurrency = 8 * 1024 - - // DefaultRequestTimeout is the default timeout for client request. - DefaultRequestTimeout = 20 * time.Second - - // DefaultPendingMessages is the default number of pending messages - // handled by Client and Server. - DefaultPendingMessages = 32 * 1024 - - // DefaultFlushDelay is the default delay between message flushes - // on Client and Server. - DefaultFlushDelay = -1 - - // DefaultBufferSize is the default size for Client and Server buffers. - DefaultBufferSize = 64 * 1024 -) - -// OnConnectFunc is a callback, which may be called by both Client and Server -// on every connection creation if assigned -// to Client.OnConnect / Server.OnConnect. -// -// remoteAddr is the address of the remote end for the established -// connection rwc. -// -// The callback must return either rwc itself or a rwc wrapper. -// The returned connection wrapper MUST send all the data to the underlying -// rwc on every Write() call, otherwise the connection will hang forever. -// -// The callback may be used for authentication/authorization and/or custom -// transport wrapping. -type OnConnectFunc func(rwc net.Conn) (net.Conn, string, error) - -// LoggerFunc is an error logging function to pass to gorpc.SetErrorLogger(). -type LoggerFunc func(format string, args ...interface{}) - -var errorLogger = LoggerFunc(log.Printf) - -// SetErrorLogger sets the given error logger to use in gorpc. -// -// By default log.Printf is used for error logging. -func SetErrorLogger(f LoggerFunc) { - errorLogger = f -} - -// NilErrorLogger discards all error messages. -// -// Pass NilErrorLogger to SetErrorLogger() in order to suppress error log generated -// by gorpc. -func NilErrorLogger(format string, args ...interface{}) {} - -func logPanic(format string, args ...interface{}) { - errorLogger(format, args...) - s := fmt.Sprintf(format, args...) 
- panic(s) -} - -var timerPool sync.Pool - -func acquireTimer(timeout time.Duration) *time.Timer { - tv := timerPool.Get() - if tv == nil { - return time.NewTimer(timeout) - } - - t := tv.(*time.Timer) - if t.Reset(timeout) { - panic("BUG: Active timer trapped into acquireTimer()") - } - return t -} - -func releaseTimer(t *time.Timer) { - if !t.Stop() { - // Collect possibly added time from the channel - // if timer has been stopped and nobody collected its' value. - select { - case <-t.C: - default: - } - } - - timerPool.Put(t) -} - -var closedFlushChan = make(chan time.Time) - -func init() { - close(closedFlushChan) -} - -func getFlushChan(t *time.Timer, flushDelay time.Duration) <-chan time.Time { - if flushDelay <= 0 { - return closedFlushChan - } - - if !t.Stop() { - // Exhaust expired timer's chan. - select { - case <-t.C: - default: - } - } - t.Reset(flushDelay) - return t.C -} diff --git a/vendor/github.com/TykTechnologies/gorpc/conn_stats.go b/vendor/github.com/TykTechnologies/gorpc/conn_stats.go deleted file mode 100644 index eadd6166da8..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/conn_stats.go +++ /dev/null @@ -1,125 +0,0 @@ -package gorpc - -import ( - "io" - "sync" - "time" -) - -// ConnStats provides connection statistics. Applied to both gorpc.Client -// and gorpc.Server. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. -type ConnStats struct { - // The number of rpc calls performed. - RPCCalls uint64 - - // The total aggregate time for all rpc calls in milliseconds. - // - // This time can be used for calculating the average response time - // per RPC: - // avgRPCTtime = RPCTime / RPCCalls - RPCTime uint64 - - // The number of bytes written to the underlying connections. - BytesWritten uint64 - - // The number of bytes read from the underlying connections. - BytesRead uint64 - - // The number of Read() calls. - ReadCalls uint64 - - // The number of Read() errors. - ReadErrors uint64 - - // The number of Write() calls. - WriteCalls uint64 - - // The number of Write() errors. - WriteErrors uint64 - - // The number of Dial() calls. - DialCalls uint64 - - // The number of Dial() errors. - DialErrors uint64 - - // The number of Accept() calls. - AcceptCalls uint64 - - // The number of Accept() errors. - AcceptErrors uint64 - - // lock is for 386 builds. See https://github.com/valyala/gorpc/issues/5 . - lock sync.Mutex -} - -// AvgRPCTime returns the average RPC execution time. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. -func (cs *ConnStats) AvgRPCTime() time.Duration { - return time.Duration(float64(cs.RPCTime)/float64(cs.RPCCalls)) * time.Millisecond -} - -// AvgRPCBytes returns the average bytes sent / received per RPC. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. -func (cs *ConnStats) AvgRPCBytes() (send float64, recv float64) { - return float64(cs.BytesWritten) / float64(cs.RPCCalls), float64(cs.BytesRead) / float64(cs.RPCCalls) -} - -// AvgRPCCalls returns the average number of write() / read() syscalls per PRC. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. 
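(A short sketch of reading the connection statistics described above; c is assumed to be a started gorpc Client, and Snapshot(), RPCCalls, AvgRPCTime() and AvgRPCBytes() are the accessors defined in this file.)

```go
// Snapshot() copies the counters so they can be read without racing
// the client's own goroutines (see the comments above).
stats := c.Stats.Snapshot()
log.Printf("rpc calls: %d", stats.RPCCalls)
log.Printf("avg rpc time: %s", stats.AvgRPCTime())

sentPerRPC, recvPerRPC := stats.AvgRPCBytes()
log.Printf("avg bytes per rpc: sent %.1f, received %.1f", sentPerRPC, recvPerRPC)
```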
-func (cs *ConnStats) AvgRPCCalls() (write float64, read float64) { - return float64(cs.WriteCalls) / float64(cs.RPCCalls), float64(cs.ReadCalls) / float64(cs.RPCCalls) -} - -type writerCounter struct { - w io.Writer - cs *ConnStats -} - -type readerCounter struct { - r io.Reader - cs *ConnStats -} - -func newWriterCounter(w io.Writer, cs *ConnStats) io.Writer { - return &writerCounter{ - w: w, - cs: cs, - } -} - -func newReaderCounter(r io.Reader, cs *ConnStats) io.Reader { - return &readerCounter{ - r: r, - cs: cs, - } -} - -func (w *writerCounter) Write(p []byte) (int, error) { - n, err := w.w.Write(p) - w.cs.incWriteCalls() - if err != nil { - w.cs.incWriteErrors() - } - w.cs.addBytesWritten(uint64(n)) - return n, err -} - -func (r *readerCounter) Read(p []byte) (int, error) { - n, err := r.r.Read(p) - r.cs.incReadCalls() - if err != nil { - r.cs.incReadErrors() - } - r.cs.addBytesRead(uint64(n)) - return n, err -} diff --git a/vendor/github.com/TykTechnologies/gorpc/conn_stats_386.go b/vendor/github.com/TykTechnologies/gorpc/conn_stats_386.go deleted file mode 100644 index 03c7b4d1730..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/conn_stats_386.go +++ /dev/null @@ -1,113 +0,0 @@ -// Separate implementation for 386, since it has broken support for atomics. -// See https://github.com/valyala/gorpc/issues/5 for details. - -// +build 386 - -package gorpc - -import ( - "sync" -) - -// Snapshot returns connection statistics' snapshot. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. -func (cs *ConnStats) Snapshot() *ConnStats { - cs.lock.Lock() - snapshot := *cs - cs.lock.Unlock() - - snapshot.lock = sync.Mutex{} - return &snapshot -} - -// Reset resets all the stats counters. 
-func (cs *ConnStats) Reset() { - cs.lock.Lock() - cs.RPCCalls = 0 - cs.RPCTime = 0 - cs.BytesWritten = 0 - cs.BytesRead = 0 - cs.WriteCalls = 0 - cs.WriteErrors = 0 - cs.ReadCalls = 0 - cs.ReadErrors = 0 - cs.DialCalls = 0 - cs.DialErrors = 0 - cs.AcceptCalls = 0 - cs.AcceptErrors = 0 - cs.lock.Unlock() -} - -func (cs *ConnStats) incRPCCalls() { - cs.lock.Lock() - cs.RPCCalls++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incRPCTime(dt uint64) { - cs.lock.Lock() - cs.RPCTime += dt - cs.lock.Unlock() -} - -func (cs *ConnStats) addBytesWritten(n uint64) { - cs.lock.Lock() - cs.BytesWritten += n - cs.lock.Unlock() -} - -func (cs *ConnStats) addBytesRead(n uint64) { - cs.lock.Lock() - cs.BytesRead += n - cs.lock.Unlock() -} - -func (cs *ConnStats) incReadCalls() { - cs.lock.Lock() - cs.ReadCalls++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incReadErrors() { - cs.lock.Lock() - cs.ReadErrors++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incWriteCalls() { - cs.lock.Lock() - cs.WriteCalls++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incWriteErrors() { - cs.lock.Lock() - cs.WriteErrors++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incDialCalls() { - cs.lock.Lock() - cs.DialCalls++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incDialErrors() { - cs.lock.Lock() - cs.DialErrors++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incAcceptCalls() { - cs.lock.Lock() - cs.AcceptCalls++ - cs.lock.Unlock() -} - -func (cs *ConnStats) incAcceptErrors() { - cs.lock.Lock() - cs.AcceptErrors++ - cs.lock.Unlock() -} diff --git a/vendor/github.com/TykTechnologies/gorpc/conn_stats_generic.go b/vendor/github.com/TykTechnologies/gorpc/conn_stats_generic.go deleted file mode 100644 index 110010876d8..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/conn_stats_generic.go +++ /dev/null @@ -1,92 +0,0 @@ -// +build !386 - -package gorpc - -import ( - "sync/atomic" -) - -// Snapshot returns connection statistics' snapshot. -// -// Use stats returned from ConnStats.Snapshot() on live Client and / or Server, -// since the original stats can be updated by concurrently running goroutines. -func (cs *ConnStats) Snapshot() *ConnStats { - return &ConnStats{ - RPCCalls: atomic.LoadUint64(&cs.RPCCalls), - RPCTime: atomic.LoadUint64(&cs.RPCTime), - BytesWritten: atomic.LoadUint64(&cs.BytesWritten), - BytesRead: atomic.LoadUint64(&cs.BytesRead), - ReadCalls: atomic.LoadUint64(&cs.ReadCalls), - ReadErrors: atomic.LoadUint64(&cs.ReadErrors), - WriteCalls: atomic.LoadUint64(&cs.WriteCalls), - WriteErrors: atomic.LoadUint64(&cs.WriteErrors), - DialCalls: atomic.LoadUint64(&cs.DialCalls), - DialErrors: atomic.LoadUint64(&cs.DialErrors), - AcceptCalls: atomic.LoadUint64(&cs.AcceptCalls), - AcceptErrors: atomic.LoadUint64(&cs.AcceptErrors), - } -} - -// Reset resets all the stats counters. 
-func (cs *ConnStats) Reset() { - atomic.StoreUint64(&cs.RPCCalls, 0) - atomic.StoreUint64(&cs.RPCTime, 0) - atomic.StoreUint64(&cs.BytesWritten, 0) - atomic.StoreUint64(&cs.BytesRead, 0) - atomic.StoreUint64(&cs.WriteCalls, 0) - atomic.StoreUint64(&cs.WriteErrors, 0) - atomic.StoreUint64(&cs.ReadCalls, 0) - atomic.StoreUint64(&cs.ReadErrors, 0) - atomic.StoreUint64(&cs.DialCalls, 0) - atomic.StoreUint64(&cs.DialErrors, 0) - atomic.StoreUint64(&cs.AcceptCalls, 0) - atomic.StoreUint64(&cs.AcceptErrors, 0) -} - -func (cs *ConnStats) incRPCCalls() { - atomic.AddUint64(&cs.RPCCalls, 1) -} - -func (cs *ConnStats) incRPCTime(dt uint64) { - atomic.AddUint64(&cs.RPCTime, dt) -} - -func (cs *ConnStats) addBytesWritten(n uint64) { - atomic.AddUint64(&cs.BytesWritten, n) -} - -func (cs *ConnStats) addBytesRead(n uint64) { - atomic.AddUint64(&cs.BytesRead, n) -} - -func (cs *ConnStats) incReadCalls() { - atomic.AddUint64(&cs.ReadCalls, 1) -} - -func (cs *ConnStats) incReadErrors() { - atomic.AddUint64(&cs.ReadErrors, 1) -} - -func (cs *ConnStats) incWriteCalls() { - atomic.AddUint64(&cs.WriteCalls, 1) -} - -func (cs *ConnStats) incWriteErrors() { - atomic.AddUint64(&cs.WriteErrors, 1) -} - -func (cs *ConnStats) incDialCalls() { - atomic.AddUint64(&cs.DialCalls, 1) -} - -func (cs *ConnStats) incDialErrors() { - atomic.AddUint64(&cs.DialErrors, 1) -} - -func (cs *ConnStats) incAcceptCalls() { - atomic.AddUint64(&cs.AcceptCalls, 1) -} - -func (cs *ConnStats) incAcceptErrors() { - atomic.AddUint64(&cs.AcceptErrors, 1) -} diff --git a/vendor/github.com/TykTechnologies/gorpc/dispatcher.go b/vendor/github.com/TykTechnologies/gorpc/dispatcher.go deleted file mode 100644 index 84f69806043..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/dispatcher.go +++ /dev/null @@ -1,620 +0,0 @@ -package gorpc - -import ( - "errors" - "fmt" - "reflect" - "strings" - "sync" - "time" -) - -// Dispatcher helps constructing HandlerFunc for dispatching across multiple -// functions and/or services. -// -// Dispatcher also automatically registers all request and response types -// for all functions and/or methods registered via AddFunc() and AddService(), -// so there is no need in calling RegisterType() for them. -// -// See examples for details. -type Dispatcher struct { - serviceMap map[string]*serviceData -} - -type serviceData struct { - sv reflect.Value - funcMap map[string]*funcData -} - -type funcData struct { - inNum int - reqt reflect.Type - fv reflect.Value -} - -// NewDispatcher returns new dispatcher. -func NewDispatcher() *Dispatcher { - return &Dispatcher{ - serviceMap: make(map[string]*serviceData), - } -} - -// AddFunc registers the given function f under the name funcName. -// -// The function must accept zero, one or two input arguments. -// If the function has two arguments, then the first argument must have -// string type - the server will pass client address in this parameter. -// -// The function must return zero, one or two values. -// * If the function has two return values, then the second value must have -// error type - the server will propagate this error to the client. -// -// * If the function returns only error value, then the server treats it -// as error, not return value, when sending to the client. -// -// Arbitrary number of functions can be registered in the dispatcher. -// -// See examples for details. 
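The AddFunc() contract spelled out in the comment above (zero, one or two input arguments, a leading clientAddr string when there are two, and at most one response plus an optional trailing error) is easiest to see with concrete registrations. Since that comment ends with "See examples for details", here is a minimal sketch under the assumption of the vendored import path; the registered function names are hypothetical.

```go
// Package example shows registrations that satisfy the Dispatcher.AddFunc()
// contract documented above; the function names are hypothetical.
package example

import (
	"fmt"

	"github.com/TykTechnologies/gorpc"
)

func NewEchoDispatcher() *gorpc.Dispatcher {
	d := gorpc.NewDispatcher()

	// Zero arguments, zero return values.
	d.AddFunc("Ping", func() {})

	// One request argument, response plus trailing error.
	d.AddFunc("Echo", func(msg string) (string, error) {
		return msg, nil
	})

	// Two arguments: the first must be a string and receives the client address.
	d.AddFunc("Whoami", func(clientAddr string, _ string) string {
		return fmt.Sprintf("you are %s", clientAddr)
	})

	// A single error return value is propagated as an error, not a response.
	d.AddFunc("Fail", func() error {
		return fmt.Errorf("always fails")
	})

	return d
}
```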
-func (d *Dispatcher) AddFunc(funcName string, f interface{}) { - sd, ok := d.serviceMap[""] - if !ok { - sd = &serviceData{ - funcMap: make(map[string]*funcData), - } - d.serviceMap[""] = sd - } - - if _, ok := sd.funcMap[funcName]; ok { - logPanic("gorpc.Dispatcher: function %s has been already registered", funcName) - } - - fd := &funcData{ - fv: reflect.Indirect(reflect.ValueOf(f)), - } - var err error - if fd.inNum, fd.reqt, err = validateFunc(funcName, fd.fv, false); err != nil { - logPanic("gorpc.Disaptcher: %s", err) - } - sd.funcMap[funcName] = fd -} - -// AddService registers public methods of the given service under -// the given name serviceName. -// -// Since only public methods are registered, the service must have at least -// one public method. -// -// All public methods must conform requirements described in AddFunc(). -func (d *Dispatcher) AddService(serviceName string, service interface{}) { - if serviceName == "" { - logPanic("gorpc.Dispatcher: serviceName cannot be empty") - } - if _, ok := d.serviceMap[serviceName]; ok { - logPanic("gorpc.Dispatcher: service with name=[%s] has been already registered", serviceName) - } - - funcMap := make(map[string]*funcData) - - st := reflect.TypeOf(service) - if st.Kind() == reflect.Struct { - logPanic("gorpc.Dispatcher: service [%s] must be a pointer to struct, i.e. *%s", serviceName, st) - } - - for i := 0; i < st.NumMethod(); i++ { - mv := st.Method(i) - - if mv.PkgPath != "" { - // skip unexported methods - continue - } - - funcName := serviceName + "." + mv.Name - fd := &funcData{ - fv: mv.Func, - } - var err error - if fd.inNum, fd.reqt, err = validateFunc(funcName, fd.fv, true); err != nil { - logPanic("gorpc.Dispatcher: %s", err) - } - funcMap[mv.Name] = fd - } - - if len(funcMap) == 0 { - logPanic("gorpc.Dispatcher: the service %s has no methods suitable for rpc", serviceName) - } - - d.serviceMap[serviceName] = &serviceData{ - sv: reflect.ValueOf(service), - funcMap: funcMap, - } -} - -func validateFunc(funcName string, fv reflect.Value, isMethod bool) (inNum int, reqt reflect.Type, err error) { - if funcName == "" { - err = fmt.Errorf("funcName cannot be empty") - return - } - - ft := fv.Type() - if ft.Kind() != reflect.Func { - err = fmt.Errorf("function [%s] must be a function instead of %s", funcName, ft) - return - } - - inNum = ft.NumIn() - outNum := ft.NumOut() - - dt := 0 - if isMethod { - dt = 1 - } - - if inNum == 2+dt { - if ft.In(dt).Kind() != reflect.String { - err = fmt.Errorf("unexpected type for the first argument of the function [%s]: [%s]. Expected string", funcName, ft.In(dt)) - return - } - } else if inNum > 2+dt { - err = fmt.Errorf("unexpected number of arguments in the function [%s]: %d. Expected 0, 1 (request) or 2 (clientAddr, request)", funcName, inNum-dt) - return - } - - if outNum == 2 { - if !isErrorType(ft.Out(1)) { - err = fmt.Errorf("unexpected type for the second return value of the function [%s]: [%s]. Expected [%s]", funcName, ft.Out(1), errt) - return - } - } else if outNum > 2 { - err = fmt.Errorf("unexpected number of return values for the function %s: %d. 
Expected 0, 1 (response) or 2 (response, error)", funcName, outNum) - return - } - - if inNum > dt { - reqt = ft.In(inNum - 1) - if err = registerType("request", funcName, reqt); err != nil { - return - } - } - - if outNum > 0 { - respt := ft.Out(0) - if !isErrorType(respt) { - if err = registerType("response", funcName, ft.Out(0)); err != nil { - return - } - } - } - - return -} - -func registerType(s, funcName string, t reflect.Type) error { - if t.Kind() == reflect.Struct { - return fmt.Errorf("%s in the function [%s] should be passed by reference, i.e. *%s", s, funcName, t) - } - if err := validateType(t); err != nil { - return fmt.Errorf("%s in the function [%s] cannot contain %s", s, funcName, err) - } - - t = removePtr(t) - tv := reflect.New(t) - if t.Kind() != reflect.Struct { - tv = reflect.Indirect(tv) - } - - switch t.Kind() { - case reflect.Array, reflect.Slice, reflect.Map, reflect.Struct: - RegisterType(tv.Interface()) - default: - } - - return nil -} - -func removePtr(t reflect.Type) reflect.Type { - for t.Kind() == reflect.Ptr { - t = t.Elem() - } - return t -} - -var validatedTypes []*validatedType - -type validatedType struct { - t reflect.Type - err *error -} - -func validateType(t reflect.Type) (err error) { - t = removePtr(t) - for _, vd := range validatedTypes { - if vd.t == t { - return *vd.err - } - } - validatedTypes = append(validatedTypes, &validatedType{ - t: t, - err: &err, - }) - - switch t.Kind() { - case reflect.Chan, reflect.Func, reflect.Interface, reflect.UnsafePointer: - err = fmt.Errorf("%s. Found [%s]", t.Kind(), t) - return - case reflect.Array, reflect.Slice: - if err = validateType(t.Elem()); err != nil { - err = fmt.Errorf("%s in the %s [%s]", err, t.Kind(), t) - return - } - case reflect.Map: - if err = validateType(t.Elem()); err != nil { - err = fmt.Errorf("%s in the value of map [%s]", err, t) - return - } - if err = validateType(t.Key()); err != nil { - err = fmt.Errorf("%s in the key of map [%s]", err, t) - return - } - case reflect.Struct: - n := 0 - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if f.PkgPath == "" { - if err = validateType(f.Type); err != nil { - err = fmt.Errorf("%s in the field [%s] of struct [%s]", err, f.Name, t) - return - } - n++ - } - } - if n == 0 { - err = fmt.Errorf("struct without exported fields [%s]", t) - return - } - } - - return err -} - -type dispatcherRequest struct { - Request interface{} - Name string -} - -type dispatcherResponse struct { - Response interface{} - Error string -} - -func init() { - RegisterType(&dispatcherRequest{}) - RegisterType(&dispatcherResponse{}) -} - -// NewHandlerFunc returns HandlerFunc serving all the functions and/or services -// registered via AddFunc() and AddService(). -// -// The returned HandlerFunc must be assigned to Server.Handler or -// passed to New*Server(). 
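As the comment above notes, the HandlerFunc built by NewHandlerFunc() is what gets assigned to Server.Handler or passed to the New*Server() constructors. A minimal end-to-end sketch, assuming the vendored import path, a local address, and the client's usual Start/Stop lifecycle (the Client type itself is defined outside this hunk); the "Echo" function is hypothetical.

```go
package main

import (
	"log"

	"github.com/TykTechnologies/gorpc"
)

func main() {
	d := gorpc.NewDispatcher()
	d.AddFunc("Echo", func(msg string) string { return msg })

	// The dispatcher's HandlerFunc is passed straight to the server constructor.
	s := gorpc.NewTCPServer("127.0.0.1:12345", d.NewHandlerFunc())
	if err := s.Start(); err != nil {
		log.Fatalf("cannot start server: %v", err)
	}
	defer s.Stop()

	// The client must be started before use (per the Client docs, not shown here).
	c := gorpc.NewTCPClient("127.0.0.1:12345")
	c.Start()
	defer c.Stop()

	// A function client routes calls to functions registered via AddFunc().
	fc := d.NewFuncClient(c)
	resp, err := fc.Call("Echo", "hello")
	if err != nil {
		log.Fatalf("call failed: %v", err)
	}
	log.Printf("response: %v", resp)
}
```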
-func (d *Dispatcher) NewHandlerFunc() HandlerFunc { - if len(d.serviceMap) == 0 { - logPanic("gorpc.Dispatcher: register at least one service before calling HandlerFunc()") - } - - serviceMap := copyServiceMap(d.serviceMap) - - return func(clientAddr string, request interface{}) interface{} { - req, ok := request.(*dispatcherRequest) - if !ok { - logPanic("gorpc.Dispatcher: unsupported request type received from the client: %T", request) - } - return dispatchRequest(serviceMap, clientAddr, req) - } -} - -func copyServiceMap(sm map[string]*serviceData) map[string]*serviceData { - serviceMap := make(map[string]*serviceData) - for sk, sv := range sm { - funcMap := make(map[string]*funcData) - for fk, fv := range sv.funcMap { - funcMap[fk] = fv - } - serviceMap[sk] = &serviceData{ - sv: sv.sv, - funcMap: funcMap, - } - } - return serviceMap -} - -func dispatchRequest(serviceMap map[string]*serviceData, clientAddr string, req *dispatcherRequest) *dispatcherResponse { - callName := strings.SplitN(req.Name, ".", 2) - if len(callName) != 2 { - return &dispatcherResponse{ - Error: fmt.Sprintf("gorpc.Dispatcher: cannot split call name into service name and method name [%s]", req.Name), - } - } - - serviceName, funcName := callName[0], callName[1] - s, ok := serviceMap[serviceName] - if !ok { - return &dispatcherResponse{ - Error: fmt.Sprintf("gorpc.Dispatcher: unknown service name [%s]", serviceName), - } - } - - fd, ok := s.funcMap[funcName] - if !ok { - return &dispatcherResponse{ - Error: fmt.Sprintf("gorpc.Dispatcher: unknown method [%s]", req.Name), - } - } - - var inArgs []reflect.Value - if fd.inNum > 0 { - inArgs = make([]reflect.Value, fd.inNum) - - dt := 0 - if serviceName != "" { - dt = 1 - inArgs[0] = s.sv - } - if fd.inNum == 2+dt { - inArgs[dt] = reflect.ValueOf(clientAddr) - } - if fd.inNum > dt { - reqv := reflect.ValueOf(req.Request) - reqt := reflect.TypeOf(req.Request) - if reqt != fd.reqt { - return &dispatcherResponse{ - Error: fmt.Sprintf("gorpc.Dispatcher: unexpected request type for method [%s]: %s. Expected %s", req.Name, reqt, fd.reqt), - } - } - inArgs[len(inArgs)-1] = reqv - } - } - - outArgs := fd.fv.Call(inArgs) - - resp := &dispatcherResponse{} - - if len(outArgs) == 1 { - if isErrorType(outArgs[0].Type()) { - resp.Error = getErrorString(outArgs[0]) - } else { - resp.Response = outArgs[0].Interface() - } - } else if len(outArgs) == 2 { - resp.Error = getErrorString(outArgs[1]) - if resp.Error == "" { - resp.Response = outArgs[0].Interface() - } - } - - return resp -} - -var errt = reflect.TypeOf((*error)(nil)).Elem() - -func isErrorType(t reflect.Type) bool { - return t.Implements(errt) -} - -func getErrorString(v reflect.Value) string { - if v.IsNil() { - return "" - } - return v.Interface().(error).Error() -} - -// DispatcherClient is a Client wrapper suitable for calling registered -// functions and/or for calling methods of the registered services. -type DispatcherClient struct { - c *Client - serviceName string -} - -// NewFuncClient returns a client suitable for calling functions registered -// via AddFunc(). -func (d *Dispatcher) NewFuncClient(c *Client) *DispatcherClient { - if len(d.serviceMap) == 0 || d.serviceMap[""] == nil { - logPanic("gorpc.Dispatcher: register at least one function with AddFunc() before calling NewFuncClient()") - } - - return &DispatcherClient{ - c: c, - } -} - -// NewServiceClient returns a client suitable for calling methods -// of the service with name serviceName registered via AddService(). 
-// -// It is safe creating multiple service clients over a single underlying client. -func (d *Dispatcher) NewServiceClient(serviceName string, c *Client) *DispatcherClient { - if len(d.serviceMap) == 0 || d.serviceMap[serviceName] == nil { - logPanic("gorpc.Dispatcher: service [%s] must be registered with AddService() before calling NewServiceClient()", serviceName) - } - - return &DispatcherClient{ - c: c, - serviceName: serviceName, - } -} - -// Call calls the given function. -func (dc *DispatcherClient) Call(funcName string, request interface{}) (response interface{}, err error) { - return dc.CallTimeout(funcName, request, dc.c.RequestTimeout) -} - -// CallTimeout calls the given function and waits for response during the given timeout. -func (dc *DispatcherClient) CallTimeout(funcName string, request interface{}, timeout time.Duration) (response interface{}, err error) { - req := dc.getRequest(funcName, request) - resp, err := dc.c.CallTimeout(req, timeout) - return getResponse(resp, err) -} - -// Send sends the given request to the given function and doesn't -// wait for response. -func (dc *DispatcherClient) Send(funcName string, request interface{}) error { - req := dc.getRequest(funcName, request) - return dc.c.Send(req) -} - -// CallAsync calls the given function asynchronously. -func (dc *DispatcherClient) CallAsync(funcName string, request interface{}) (*AsyncResult, error) { - req := dc.getRequest(funcName, request) - - innerAr, err := dc.c.CallAsync(req) - if err != nil { - return nil, err - } - - ch := make(chan struct{}) - ar := &AsyncResult{ - Done: ch, - } - - go func() { - <-innerAr.Done - ar.Response, ar.Error = getResponse(innerAr.Response, innerAr.Error) - close(ch) - }() - - return ar, nil -} - -// DispatcherBatch allows grouping and executing multiple RPCs in a single batch. -// -// DispatcherBatch may be created via DispatcherClient.NewBatch(). -type DispatcherBatch struct { - lock sync.Mutex - c *DispatcherClient - b *Batch - ops []*BatchResult -} - -// NewBatch creates new RPC batch for the given DispatcherClient. -// -// It is safe creating multiple concurrent batches from a single client. -func (dc *DispatcherClient) NewBatch() *DispatcherBatch { - return &DispatcherBatch{ - c: dc, - b: dc.c.NewBatch(), - } -} - -// Add ads new request to the RPC batch. -// -// The order of batched RPCs execution on the server is unspecified. -// -// All the requests added to the batch are sent to the server at once -// when DispatcherBatch.Call*() is called. -// -// It is safe adding multiple requests to the same batch from concurrently -// running goroutines. -func (b *DispatcherBatch) Add(funcName string, request interface{}) *BatchResult { - return b.add(funcName, request, false) -} - -// AddSkipResponse adds new request to the RPC batch and doesn't care -// about the response. -// -// The order of batched RPCs execution on the server is unspecified. -// -// All the requests added to the batch are sent to the server at once -// when DispatcherBatch.Call*() is called. -// -// It is safe adding multiple requests to the same batch from concurrently -// running goroutines. 
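The DispatcherBatch comments above describe grouping several RPCs into one round trip, with results becoming readable only after Call() returns. A sketch of that flow, assuming a DispatcherClient set up as in the earlier sketches; "Echo" and "Ping" are the same hypothetical registrations used there.

```go
package example

import (
	"log"

	"github.com/TykTechnologies/gorpc"
)

// CallBatch groups three RPCs into a single batch. Their execution order on
// the server is unspecified; results are safe to read once Call() returns.
func CallBatch(dc *gorpc.DispatcherClient) {
	b := dc.NewBatch()

	r1 := b.Add("Echo", "first")
	r2 := b.Add("Echo", "second")

	// Fire-and-forget: no BatchResult is returned for this request.
	b.AddSkipResponse("Ping", nil)

	if err := b.Call(); err != nil {
		log.Printf("batch failed: %v", err)
		return
	}

	for _, r := range []*gorpc.BatchResult{r1, r2} {
		if r.Error != nil {
			log.Printf("call error: %v", r.Error)
			continue
		}
		log.Printf("response: %v", r.Response)
	}
}
```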
-func (b *DispatcherBatch) AddSkipResponse(funcName string, request interface{}) { - b.add(funcName, request, true) -} - -func (b *DispatcherBatch) add(funcName string, request interface{}, skipResponse bool) *BatchResult { - req := b.c.getRequest(funcName, request) - - var br *BatchResult - b.lock.Lock() - if !skipResponse { - br = &BatchResult{ - ctx: b.b.Add(req), - done: make(chan struct{}), - } - br.Done = br.done - b.ops = append(b.ops, br) - } else { - b.b.AddSkipResponse(req) - } - b.lock.Unlock() - - return br -} - -// Call calls all the RPCs added via DispatcherBatch.Add(). -// -// The order of batched RPCs execution on the server is unspecified. -// -// The caller may read all BatchResult contents returned -// from DispatcherBatch.Add() after the Call returns. -// -// It is guaranteed that all <-BatchResult.Done channels are unblocked after -// the Call returns. -func (b *DispatcherBatch) Call() error { - return b.CallTimeout(b.c.c.RequestTimeout) -} - -// CallTimeout calls all the RPCs added via DispatcherBatch.Add() and waits -// for all the RPC responses during the given timeout. -// -// The caller may read all BatchResult contents returned -// from DispatcherBatch.Add() after the CallTimeout returns. -// -// It is guaranteed that all <-BatchResult.Done channels are unblocked after -// the CallTimeout returns. -func (b *DispatcherBatch) CallTimeout(timeout time.Duration) error { - b.lock.Lock() - bb := b.b - b.b = b.c.c.NewBatch() - ops := b.ops - b.ops = nil - b.lock.Unlock() - - if err := bb.CallTimeout(timeout); err != nil { - return err - } - - for _, op := range ops { - br := op.ctx.(*BatchResult) - op.Response, op.Error = getResponse(br.Response, br.Error) - close(op.done) - } - - return nil -} - -func (dc *DispatcherClient) getRequest(funcName string, request interface{}) *dispatcherRequest { - return &dispatcherRequest{ - Name: dc.serviceName + "." + funcName, - Request: request, - } -} - -func getResponse(respv interface{}, err error) (interface{}, error) { - if err != nil { - return nil, err - } - resp, ok := respv.(*dispatcherResponse) - if !ok { - return nil, &ClientError{ - Server: true, - err: fmt.Errorf("gorpc.DispatcherClient: unexpected response type: %T. Expected *dispatcherResponse", respv), - } - } - if resp.Error != "" { - return nil, &ClientError{ - Server: true, - err: errors.New(resp.Error), - } - } - return resp.Response, nil -} diff --git a/vendor/github.com/TykTechnologies/gorpc/doc.go b/vendor/github.com/TykTechnologies/gorpc/doc.go deleted file mode 100644 index 9acb63d907e..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/doc.go +++ /dev/null @@ -1,15 +0,0 @@ -/* -Package gorpc provides simple RPC API for highload projects. - -Gorpc has the following features: - - * Easy-to-use API. - * Optimized for high load (>10K qps). - * Uses as low network bandwidth as possible. - * Minimizes the number of TCP connections in TIME_WAIT and WAIT_CLOSE states. - * Minimizes the number of send() and recv() syscalls. - * Provides ability to use arbitrary underlying transport. - By default TCP is used, but TLS and UNIX sockets are already available. 
- -*/ -package gorpc diff --git a/vendor/github.com/TykTechnologies/gorpc/encoding.go b/vendor/github.com/TykTechnologies/gorpc/encoding.go deleted file mode 100644 index 21a1a0a1072..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/encoding.go +++ /dev/null @@ -1,118 +0,0 @@ -package gorpc - -import ( - "bufio" - "compress/flate" - "encoding/gob" - "io" -) - -// RegisterType registers the given type to send via rpc. -// -// The client must register all the response types the server may send. -// The server must register all the request types the client may send. -// -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. -// -// There is no need in registering argument and return value types -// for functions and methods registered via Dispatcher. -func RegisterType(x interface{}) { - gob.Register(x) -} - -type wireRequest struct { - ID uint64 - Request interface{} -} - -type wireResponse struct { - ID uint64 - Response interface{} - Error string -} - -type messageEncoder struct { - e *gob.Encoder - bw *bufio.Writer - zw *flate.Writer - ww *bufio.Writer -} - -func (e *messageEncoder) Close() error { - if e.zw != nil { - return e.zw.Close() - } - return nil -} - -func (e *messageEncoder) Flush() error { - if e.zw != nil { - if err := e.ww.Flush(); err != nil { - return err - } - if err := e.zw.Flush(); err != nil { - return err - } - } - if err := e.bw.Flush(); err != nil { - return err - } - return nil -} - -func (e *messageEncoder) Encode(msg interface{}) error { - return e.e.Encode(msg) -} - -func newMessageEncoder(w io.Writer, bufferSize int, enableCompression bool, s *ConnStats) *messageEncoder { - w = newWriterCounter(w, s) - bw := bufio.NewWriterSize(w, bufferSize) - - ww := bw - var zw *flate.Writer - if enableCompression { - zw, _ = flate.NewWriter(bw, flate.BestSpeed) - ww = bufio.NewWriterSize(zw, bufferSize) - } - - return &messageEncoder{ - e: gob.NewEncoder(ww), - bw: bw, - zw: zw, - ww: ww, - } -} - -type messageDecoder struct { - d *gob.Decoder - zr io.ReadCloser -} - -func (d *messageDecoder) Close() error { - if d.zr != nil { - return d.zr.Close() - } - return nil -} - -func (d *messageDecoder) Decode(msg interface{}) error { - return d.d.Decode(msg) -} - -func newMessageDecoder(r io.Reader, bufferSize int, enableCompression bool, s *ConnStats) *messageDecoder { - r = newReaderCounter(r, s) - br := bufio.NewReaderSize(r, bufferSize) - - rr := br - var zr io.ReadCloser - if enableCompression { - zr = flate.NewReader(br) - rr = bufio.NewReaderSize(zr, bufferSize) - } - - return &messageDecoder{ - d: gob.NewDecoder(rr), - zr: zr, - } -} diff --git a/vendor/github.com/TykTechnologies/gorpc/server.go b/vendor/github.com/TykTechnologies/gorpc/server.go deleted file mode 100644 index 71c52aa08ed..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/server.go +++ /dev/null @@ -1,439 +0,0 @@ -package gorpc - -import ( - "fmt" - "io" - "net" - "runtime" - "sync" - "time" -) - -// HandlerFunc is a server handler function. -// -// clientAddr contains client address returned by Listener.Accept(). -// Request and response types may be arbitrary. -// All the request types the client may send to the server must be registered -// with gorpc.RegisterType() before starting the server. -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. 
-// -// Hint: use Dispatcher for HandlerFunc construction. -type HandlerFunc func(clientAddr string, request interface{}) (response interface{}) - -// Server implements RPC server. -// -// Default server settings are optimized for high load, so don't override -// them without valid reason. -type Server struct { - // Address to listen to for incoming connections. - // - // The address format depends on the underlying transport provided - // by Server.Listener. The following transports are provided - // out of the box: - // * TCP - see NewTCPServer() and NewTCPClient(). - // * TLS (aka SSL) - see NewTLSServer() and NewTLSClient(). - // * Unix sockets - see NewUnixServer() and NewUnixClient(). - // - // By default TCP transport is used. - Addr string - - // Handler function for incoming requests. - // - // Server calls this function for each incoming request. - // The function must process the request and return the corresponding response. - // - // Hint: use Dispatcher for HandlerFunc construction. - Handler HandlerFunc - - // The maximum number of concurrent rpc calls the server may perform. - // Default is DefaultConcurrency. - Concurrency int - - // The maximum delay between response flushes to clients. - // - // Negative values lead to immediate requests' sending to the client - // without their buffering. This minimizes rpc latency at the cost - // of higher CPU and network usage. - // - // Default is DefaultFlushDelay. - FlushDelay time.Duration - - // The maximum number of pending responses in the queue. - // Default is DefaultPendingMessages. - PendingResponses int - - // Size of send buffer per each underlying connection in bytes. - // Default is DefaultBufferSize. - SendBufferSize int - - // Size of recv buffer per each underlying connection in bytes. - // Default is DefaultBufferSize. - RecvBufferSize int - - // OnConnect is called whenever connection from client is accepted. - // The callback can be used for authentication/authorization/encryption - // and/or for custom transport wrapping. - // - // See also Listener, which can be used for sophisticated transport - // implementation. - OnConnect OnConnectFunc - - // The server obtains new client connections via Listener.Accept(). - // - // Override the listener if you want custom underlying transport - // and/or client authentication/authorization. - // Don't forget overriding Client.Dial() callback accordingly. - // - // See also OnConnect for authentication/authorization purposes. - // - // * NewTLSClient() and NewTLSServer() can be used for encrypted rpc. - // * NewUnixClient() and NewUnixServer() can be used for fast local - // inter-process rpc. - // - // By default it returns TCP connections accepted from Server.Addr. - Listener Listener - - // LogError is used for error logging. - // - // By default the function set via SetErrorLogger() is used. - LogError LoggerFunc - - // Connection statistics. - // - // The stats doesn't reset automatically. Feel free resetting it - // any time you wish. - Stats ConnStats - - serverStopChan chan struct{} - stopWg sync.WaitGroup -} - -// Start starts rpc server. -// -// All the request types the client may send to the server must be registered -// with gorpc.RegisterType() before starting the server. -// There is no need in registering base Go types such as int, string, bool, -// float64, etc. or arrays, slices and maps containing base Go types. 
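The Start() documentation above repeats the RegisterType() requirement from the removed encoding.go: any non-basic request or response type crossing the wire must be registered on both ends before the server starts (the Dispatcher does this automatically for its own functions). A sketch of a raw-HandlerFunc server under that assumption; the wire types and address are illustrative.

```go
package main

import (
	"log"

	"github.com/TykTechnologies/gorpc"
)

// SumRequest and SumResponse are illustrative wire types; because they are
// not basic Go types, both client and server must register them with gorpc.
type SumRequest struct{ A, B int }
type SumResponse struct{ Sum int }

func main() {
	gorpc.RegisterType(&SumRequest{})
	gorpc.RegisterType(&SumResponse{})

	s := &gorpc.Server{
		Addr: "127.0.0.1:12346",
		// A raw HandlerFunc, wired up without the Dispatcher.
		Handler: func(clientAddr string, request interface{}) interface{} {
			req := request.(*SumRequest)
			return &SumResponse{Sum: req.A + req.B}
		},
	}

	// Serve() starts the listener and blocks until Stop() is called elsewhere.
	if err := s.Serve(); err != nil {
		log.Fatalf("gorpc server failed: %v", err)
	}
}
```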
-func (s *Server) Start() error { - if s.LogError == nil { - s.LogError = errorLogger - } - if s.Handler == nil { - panic("gorpc.Server: Server.Handler cannot be nil") - } - - if s.serverStopChan != nil { - panic("gorpc.Server: server is already running. Stop it before starting it again") - } - s.serverStopChan = make(chan struct{}) - - if s.Concurrency <= 0 { - s.Concurrency = DefaultConcurrency - } - if s.FlushDelay == 0 { - s.FlushDelay = DefaultFlushDelay - } - if s.PendingResponses <= 0 { - s.PendingResponses = DefaultPendingMessages - } - if s.SendBufferSize <= 0 { - s.SendBufferSize = DefaultBufferSize - } - if s.RecvBufferSize <= 0 { - s.RecvBufferSize = DefaultBufferSize - } - - if s.Listener == nil { - s.Listener = &defaultListener{} - } - if err := s.Listener.Init(s.Addr); err != nil { - err = fmt.Errorf("gorpc.Server: [%s]. Cannot listen to: [%s]", s.Addr, err) - s.LogError("%s", err) - return err - } - - workersCh := make(chan struct{}, s.Concurrency) - s.stopWg.Add(1) - go serverHandler(s, workersCh) - return nil -} - -// Stop stops rpc server. Stopped server can be started again. -func (s *Server) Stop() { - if s.serverStopChan == nil { - panic("gorpc.Server: server must be started before stopping it") - } - close(s.serverStopChan) - s.stopWg.Wait() - s.serverStopChan = nil -} - -// Serve starts rpc server and blocks until it is stopped. -func (s *Server) Serve() error { - if err := s.Start(); err != nil { - return err - } - s.stopWg.Wait() - return nil -} - -func serverHandler(s *Server, workersCh chan struct{}) { - defer s.stopWg.Done() - - var conn net.Conn - var err error - - for { - acceptChan := make(chan struct{}) - go func() { - if conn, err = s.Listener.Accept(); err != nil { - s.LogError("gorpc.Server: [%s]. Cannot accept new connection: [%s]", s.Addr, err) - time.Sleep(time.Second) - } - close(acceptChan) - }() - - select { - case <-s.serverStopChan: - s.Listener.Close() - return - case <-acceptChan: - s.Stats.incAcceptCalls() - } - - if err != nil { - s.Stats.incAcceptErrors() - continue - } - - s.stopWg.Add(1) - go serverHandleConnection(s, conn, workersCh) - } -} - -func serverHandleConnection(s *Server, conn net.Conn, workersCh chan struct{}) { - defer s.stopWg.Done() - var clientAddr string - var err error - var newConn net.Conn - - if s.OnConnect != nil { - newConn, clientAddr, err = s.OnConnect(conn) - if err != nil { - s.LogError("gorpc.Server: [%s]->[%s]. OnConnect error: [%s]", clientAddr, s.Addr, err) - conn.Close() - return - } - conn = newConn - } - - if clientAddr == "" { - clientAddr = conn.RemoteAddr().String() - } - - var enabledCompression bool - zChan := make(chan bool, 1) - go func() { - var buf [1]byte - if _, err = conn.Read(buf[:]); err != nil { - s.LogError("gorpc.Server: [%s]->[%s]. Error when reading handshake from client: [%s]", clientAddr, s.Addr, err) - } - zChan <- (buf[0] != 0) - }() - select { - case enabledCompression = <-zChan: - if err != nil { - conn.Close() - return - } - case <-s.serverStopChan: - conn.Close() - return - case <-time.After(10 * time.Second): - s.LogError("gorpc.Server: [%s]->[%s]. 
Cannot obtain handshake from client during 10s", clientAddr, s.Addr) - conn.Close() - return - } - - responsesChan := make(chan *serverMessage, s.PendingResponses) - stopChan := make(chan struct{}) - - readerDone := make(chan struct{}) - go serverReader(s, conn, clientAddr, responsesChan, stopChan, readerDone, enabledCompression, workersCh) - - writerDone := make(chan struct{}) - go serverWriter(s, conn, clientAddr, responsesChan, stopChan, writerDone, enabledCompression) - - select { - case <-readerDone: - close(stopChan) - conn.Close() - <-writerDone - case <-writerDone: - close(stopChan) - conn.Close() - <-readerDone - case <-s.serverStopChan: - close(stopChan) - conn.Close() - <-readerDone - <-writerDone - } -} - -type serverMessage struct { - ID uint64 - Request interface{} - Response interface{} - Error string - ClientAddr string -} - -var serverMessagePool = &sync.Pool{ - New: func() interface{} { - return &serverMessage{} - }, -} - -func serverReader(s *Server, r io.Reader, clientAddr string, responsesChan chan<- *serverMessage, - stopChan <-chan struct{}, done chan<- struct{}, enabledCompression bool, workersCh chan struct{}) { - - defer func() { - if r := recover(); r != nil { - s.LogError("gorpc.Server: [%s]->[%s]. Panic when reading data from client: %v", clientAddr, s.Addr, r) - } - close(done) - }() - - d := newMessageDecoder(r, s.RecvBufferSize, enabledCompression, &s.Stats) - defer d.Close() - - var wr wireRequest - for { - if err := d.Decode(&wr); err != nil { - s.LogError("gorpc.Server: [%s]->[%s]. Cannot decode request: [%s]", clientAddr, s.Addr, err) - return - } - - m := serverMessagePool.Get().(*serverMessage) - m.ID = wr.ID - m.Request = wr.Request - m.ClientAddr = clientAddr - - wr.ID = 0 - wr.Request = nil - - select { - case workersCh <- struct{}{}: - default: - select { - case workersCh <- struct{}{}: - case <-stopChan: - return - } - } - go serveRequest(s, responsesChan, stopChan, m, workersCh) - } -} - -func serveRequest(s *Server, responsesChan chan<- *serverMessage, stopChan <-chan struct{}, m *serverMessage, workersCh <-chan struct{}) { - request := m.Request - m.Request = nil - clientAddr := m.ClientAddr - m.ClientAddr = "" - skipResponse := (m.ID == 0) - - if skipResponse { - m.Response = nil - m.Error = "" - serverMessagePool.Put(m) - } - - t := time.Now() - response, err := callHandlerWithRecover(s.LogError, s.Handler, clientAddr, s.Addr, request) - s.Stats.incRPCTime(uint64(time.Since(t).Seconds() * 1000)) - - if !skipResponse { - m.Response = response - m.Error = err - - // Select hack for better performance. - // See https://github.com/valyala/gorpc/pull/1 for details. - select { - case responsesChan <- m: - default: - select { - case responsesChan <- m: - case <-stopChan: - } - } - } - - <-workersCh -} - -func callHandlerWithRecover(logErrorFunc LoggerFunc, handler HandlerFunc, clientAddr, serverAddr string, request interface{}) (response interface{}, errStr string) { - defer func() { - if x := recover(); x != nil { - stackTrace := make([]byte, 1<<20) - n := runtime.Stack(stackTrace, false) - errStr = fmt.Sprintf("Panic occured: %v\nStack trace: %s", x, stackTrace[:n]) - logErrorFunc("gorpc.Server: [%s]->[%s]. 
%s", clientAddr, serverAddr, errStr) - } - }() - response = handler(clientAddr, request) - return -} - -func serverWriter(s *Server, w io.Writer, clientAddr string, responsesChan <-chan *serverMessage, stopChan <-chan struct{}, done chan<- struct{}, enabledCompression bool) { - defer func() { close(done) }() - - e := newMessageEncoder(w, s.SendBufferSize, enabledCompression, &s.Stats) - defer e.Close() - - var flushChan <-chan time.Time - t := time.NewTimer(s.FlushDelay) - var wr wireResponse - for { - var m *serverMessage - - select { - case m = <-responsesChan: - default: - select { - case <-stopChan: - return - case m = <-responsesChan: - case <-flushChan: - if err := e.Flush(); err != nil { - s.LogError("gorpc.Server: [%s]->[%s]: Cannot flush responses to underlying stream: [%s]", clientAddr, s.Addr, err) - return - } - flushChan = nil - continue - } - } - - if flushChan == nil { - flushChan = getFlushChan(t, s.FlushDelay) - } - - wr.ID = m.ID - wr.Response = m.Response - wr.Error = m.Error - - m.Response = nil - m.Error = "" - serverMessagePool.Put(m) - - if err := e.Encode(wr); err != nil { - s.LogError("gorpc.Server: [%s]->[%s]. Cannot send response to wire: [%s]", clientAddr, s.Addr, err) - return - } - wr.Response = nil - wr.Error = "" - - s.Stats.incRPCCalls() - } -} diff --git a/vendor/github.com/TykTechnologies/gorpc/transport.go b/vendor/github.com/TykTechnologies/gorpc/transport.go deleted file mode 100644 index 108cd716377..00000000000 --- a/vendor/github.com/TykTechnologies/gorpc/transport.go +++ /dev/null @@ -1,228 +0,0 @@ -package gorpc - -import ( - "crypto/tls" - "net" - "time" -) - -var ( - dialer = &net.Dialer{ - Timeout: 10 * time.Second, - KeepAlive: 30 * time.Second, - } -) - -// DialFunc is a function intended for setting to Client.Dial. -// -// It is expected that the returned conn immediately -// sends all the data passed via Write() to the server. -// Otherwise gorpc may hang. -// The conn implementation must call Flush() on underlying buffered -// streams before returning from Write(). -type DialFunc func(addr string) (conn net.Conn, err error) - -// Listener is an interface for custom listeners intended for the Server. -type Listener interface { - // Init is called on server start. - // - // addr contains the address set at Server.Addr. - Init(addr string) error - - // Accept must return incoming connections from clients. - // clientAddr must contain client's address in user-readable view. - // - // It is expected that the returned conn immediately - // sends all the data passed via Write() to the client. - // Otherwise gorpc may hang. - // The conn implementation must call Flush() on underlying buffered - // streams before returning from Write(). - Accept() (conn net.Conn, err error) - - // Close closes the listener. - // All pending calls to Accept() must immediately return errors after - // Close is called. - // All subsequent calls to Accept() must immediately return error. 
- Close() error -} - -func defaultDial(addr string) (conn net.Conn, err error) { - return dialer.Dial("tcp", addr) -} - -type defaultListener struct { - L net.Listener -} - -func (ln *defaultListener) Init(addr string) (err error) { - ln.L, err = net.Listen("tcp", addr) - return -} - -func (ln *defaultListener) Accept() (conn net.Conn, err error) { - c, err := ln.L.Accept() - if err != nil { - return nil, err - } - if err = setupKeepalive(c); err != nil { - c.Close() - return nil, err - } - return c, nil -} - -func (ln *defaultListener) Close() error { - return ln.L.Close() -} - -func setupKeepalive(conn net.Conn) error { - tcpConn := conn.(*net.TCPConn) - if err := tcpConn.SetKeepAlive(true); err != nil { - return err - } - if err := tcpConn.SetKeepAlivePeriod(30 * time.Second); err != nil { - return err - } - return nil -} - -type netListener struct { - F func(addr string) (net.Listener, error) - L net.Listener -} - -func (ln *netListener) Init(addr string) (err error) { - ln.L, err = ln.F(addr) - return -} - -func (ln *netListener) Accept() (conn net.Conn, err error) { - c, err := ln.L.Accept() - if err != nil { - return nil, err - } - return c, nil -} - -func (ln *netListener) Close() error { - return ln.L.Close() -} - -func unixDial(addr string) (conn net.Conn, err error) { - c, err := net.Dial("unix", addr) - if err != nil { - return nil, err - } - return c, err -} - -// NewTCPClient creates a client connecting over TCP to the server -// listening to the given addr. -// -// The returned client must be started after optional settings' adjustment. -// -// The corresponding server must be created with NewTCPServer(). -func NewTCPClient(addr string) *Client { - return &Client{ - Addr: addr, - Dial: defaultDial, - } -} - -// NewTCPServer creates a server listening for TCP connections -// on the given addr and processing incoming requests -// with the given HandlerFunc. -// -// The returned server must be started after optional settings' adjustment. -// -// The corresponding client must be created with NewTCPClient(). -func NewTCPServer(addr string, handler HandlerFunc) *Server { - return &Server{ - Addr: addr, - Handler: handler, - Listener: &defaultListener{}, - } -} - -// NewUnixClient creates a client connecting over unix socket -// to the server listening to the given addr. -// -// The returned client must be started after optional settings' adjustment. -// -// The corresponding server must be created with NewUnixServer(). -func NewUnixClient(addr string) *Client { - return &Client{ - Addr: addr, - Dial: unixDial, - - // There is little sense in compressing rpc data passed - // over local unix sockets. - DisableCompression: true, - - // Sacrifice the number of Write() calls to the smallest - // possible latency, since it has higher priority in local IPC. - FlushDelay: -1, - } -} - -// NewUnixServer creates a server listening for unix connections -// on the given addr and processing incoming requests -// with the given HandlerFunc. -// -// The returned server must be started after optional settings' adjustment. -// -// The corresponding client must be created with NewUnixClient(). -func NewUnixServer(addr string, handler HandlerFunc) *Server { - return &Server{ - Addr: addr, - Handler: handler, - Listener: &netListener{ - F: func(addr string) (net.Listener, error) { - return net.Listen("unix", addr) - }, - }, - - // Sacrifice the number of Write() calls to the smallest - // possible latency, since it has higher priority in local IPC. 
- FlushDelay: -1, - } -} - -// NewTLSClient creates a client connecting over TLS (aka SSL) to the server -// listening to the given addr using the given TLS config. -// -// The returned client must be started after optional settings' adjustment. -// -// The corresponding server must be created with NewTLSServer(). -func NewTLSClient(addr string, cfg *tls.Config) *Client { - return &Client{ - Addr: addr, - Dial: func(addr string) (conn net.Conn, err error) { - c, err := tls.DialWithDialer(dialer, "tcp", addr, cfg) - if err != nil { - return nil, err - } - return c, err - }, - } -} - -// NewTLSServer creates a server listening for TLS (aka SSL) connections -// on the given addr and processing incoming requests -// with the given HandlerFunc. -// cfg must contain TLS settings for the server. -// -// The returned server must be started after optional settings' adjustment. -// -// The corresponding client must be created with NewTLSClient(). -func NewTLSServer(addr string, handler HandlerFunc, cfg *tls.Config) *Server { - return &Server{ - Addr: addr, - Handler: handler, - Listener: &netListener{ - F: func(addr string) (net.Listener, error) { - return tls.Listen("tcp", addr, cfg) - }, - }, - } -} diff --git a/vendor/github.com/TykTechnologies/goverify/README.md b/vendor/github.com/TykTechnologies/goverify/README.md deleted file mode 100644 index c1b3659d64a..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# RSA Verifier - -This lib makes it easy to verify a string with a signature using an RSA public/private key combination. - -Shamelessly ripped off from SO: - -http://stackoverflow.com/questions/20655702/signing-and-decoding-with-rsa-sha-in-go - -And this play example: - -https://play.golang.org/p/bzpD7Pa9mr \ No newline at end of file diff --git a/vendor/github.com/TykTechnologies/goverify/goverify.go b/vendor/github.com/TykTechnologies/goverify/goverify.go deleted file mode 100644 index ae9372c3d24..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/goverify.go +++ /dev/null @@ -1,3 +0,0 @@ -package goverify - - diff --git a/vendor/github.com/TykTechnologies/goverify/rsa_signer.go b/vendor/github.com/TykTechnologies/goverify/rsa_signer.go deleted file mode 100644 index af98c67a539..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/rsa_signer.go +++ /dev/null @@ -1,20 +0,0 @@ -package goverify - -import ( - "crypto" - "crypto/rand" - "crypto/rsa" - "crypto/sha256" -) - -type RSAPrivateKey struct { - *rsa.PrivateKey -} - -// Sign signs data with rsa-sha256 -func (r *RSAPrivateKey) Sign(data []byte) ([]byte, error) { - h := sha256.New() - h.Write(data) - d := h.Sum(nil) - return rsa.SignPKCS1v15(rand.Reader, r.PrivateKey, crypto.SHA256, d) -} diff --git a/vendor/github.com/TykTechnologies/goverify/rsa_verifier.go b/vendor/github.com/TykTechnologies/goverify/rsa_verifier.go deleted file mode 100644 index 7c033f105d2..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/rsa_verifier.go +++ /dev/null @@ -1,19 +0,0 @@ -package goverify - -import ( - "crypto" - "crypto/rsa" - "crypto/sha256" -) - -type RSAPublicKey struct { - *rsa.PublicKey -} - -// Unsign verifies the message using a rsa-sha256 signature -func (r *RSAPublicKey) Verify(message []byte, sig []byte) error { - h := sha256.New() - h.Write(message) - d := h.Sum(nil) - return rsa.VerifyPKCS1v15(r.PublicKey, crypto.SHA256, d, sig) -} diff --git a/vendor/github.com/TykTechnologies/goverify/signer.go b/vendor/github.com/TykTechnologies/goverify/signer.go 
deleted file mode 100644 index 940b88f1d12..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/signer.go +++ /dev/null @@ -1,24 +0,0 @@ -package goverify - -import ( - "crypto/rsa" - "fmt" -) - -// A Signer is can create signatures that verify against a public key. -type Signer interface { - // Sign returns raw signature for the given data. This method - // will apply the hash specified for the keytype to the data. - Sign(data []byte) ([]byte, error) -} - -func newSignerFromKey(k interface{}) (Signer, error) { - var sshKey Signer - switch t := k.(type) { - case *rsa.PrivateKey: - sshKey = &RSAPrivateKey{t} - default: - return nil, fmt.Errorf("ssh: unsupported key type %T", k) - } - return sshKey, nil -} diff --git a/vendor/github.com/TykTechnologies/goverify/util.go b/vendor/github.com/TykTechnologies/goverify/util.go deleted file mode 100644 index 864a6ff4c61..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/util.go +++ /dev/null @@ -1,83 +0,0 @@ -package goverify - -import ( - "crypto/x509" - "encoding/pem" - "errors" - "fmt" - "io/ioutil" -) - -// loadPrivateKey loads an parses a PEM encoded private key file. -func LoadPublicKeyFromFile(path string) (Verifier, error) { - dat, err := ioutil.ReadFile(path) - - if err != nil { - return nil, err - } - - return parsePublicKey(dat) -} - -func LoadPublicKeyFromString(key string) (Verifier, error) { - - return parsePublicKey([]byte(key)) -} - -// parsePublicKey parses a PEM encoded private key. -func parsePublicKey(pemBytes []byte) (Verifier, error) { - block, _ := pem.Decode(pemBytes) - if block == nil { - return nil, errors.New("ssh: no key found") - } - - var rawkey interface{} - switch block.Type { - case "PUBLIC KEY": - rsa, err := x509.ParsePKIXPublicKey(block.Bytes) - if err != nil { - return nil, err - } - rawkey = rsa - default: - return nil, fmt.Errorf("ssh: unsupported key type %q", block.Type) - } - - return newVerifierFromKey(rawkey) -} - -// loadPrivateKey loads an parses a PEM encoded private key file. -func LoadPrivateKeyFromFile(path string) (Signer, error) { - dat, err := ioutil.ReadFile(path) - - if err != nil { - return nil, err - } - - return parsePrivateKey(dat) -} - -func LoadPrivateKeyFromString(key string) (Signer, error) { - return parsePrivateKey([]byte(key)) -} - -// parsePublicKey parses a PEM encoded private key. -func parsePrivateKey(pemBytes []byte) (Signer, error) { - block, _ := pem.Decode(pemBytes) - if block == nil { - return nil, errors.New("ssh: no key found") - } - - var rawkey interface{} - switch block.Type { - case "RSA PRIVATE KEY": - rsa, err := x509.ParsePKCS1PrivateKey(block.Bytes) - if err != nil { - return nil, err - } - rawkey = rsa - default: - return nil, fmt.Errorf("ssh: unsupported key type %q", block.Type) - } - return newSignerFromKey(rawkey) -} diff --git a/vendor/github.com/TykTechnologies/goverify/verifier.go b/vendor/github.com/TykTechnologies/goverify/verifier.go deleted file mode 100644 index 409766ec9aa..00000000000 --- a/vendor/github.com/TykTechnologies/goverify/verifier.go +++ /dev/null @@ -1,24 +0,0 @@ -package goverify - -import ( - "crypto/rsa" - "fmt" -) - -// A Verifier is can validate signatures that verify against a public key. -type Verifier interface { - // Sign returns raw signature for the given data. This method - // will apply the hash specified for the keytype to the data. 
- Verify(data []byte, sig []byte) error -} - -func newVerifierFromKey(k interface{}) (Verifier, error) { - var sshKey Verifier - switch t := k.(type) { - case *rsa.PublicKey: - sshKey = &RSAPublicKey{t} - default: - return nil, fmt.Errorf("ssh: unsupported key type %T", k) - } - return sshKey, nil -} diff --git a/vendor/github.com/TykTechnologies/leakybucket/.drone.yml b/vendor/github.com/TykTechnologies/leakybucket/.drone.yml deleted file mode 100644 index c115285ae6a..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/.drone.yml +++ /dev/null @@ -1,19 +0,0 @@ -env: -- REPORT_CARD_GITHUB_STATUS_TOKEN=$$report_card_github_status_token -- REPORT_CARD_GITHUB_REPO_TOKEN=$$report_card_github_repo_token -image: clever/drone-go:1.6 -notify: - email: - recipients: - - drone@clever.com - slack: - on_failure: true - on_started: false - on_success: false - webhook_url: $$slack_webhook -script: -- sudo pip install -q git+https://$REPORT_CARD_GITHUB_REPO_TOKEN@github.com/Clever/report-card.git; GITHUB_API_TOKEN=$REPORT_CARD_GITHUB_STATUS_TOKEN report-card --publish || true -- ulimit -n 2560 -- make test -services: -- redis:2.6 diff --git a/vendor/github.com/TykTechnologies/leakybucket/.gitignore b/vendor/github.com/TykTechnologies/leakybucket/.gitignore deleted file mode 100644 index 19b887e04ea..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*~ -c.out diff --git a/vendor/github.com/TykTechnologies/leakybucket/LICENSE b/vendor/github.com/TykTechnologies/leakybucket/LICENSE deleted file mode 100644 index a6da337d1ae..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/LICENSE +++ /dev/null @@ -1,190 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - Copyright 2014 Clever, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/TykTechnologies/leakybucket/Makefile b/vendor/github.com/TykTechnologies/leakybucket/Makefile deleted file mode 100644 index 2375d33f876..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -include golang.mk -.DEFAULT_GOAL := test # override default goal set in library makefile - -.PHONY: test $(PKGS) -SHELL := /bin/bash -PKG := github.com/Clever/leakybucket -PKGS := $(shell go list ./...) -$(eval $(call golang-version-check,1.6)) - -export REDIS_URL ?= localhost:6379 - -test: $(PKGS) -$(PKGS): golang-test-all-deps - go get -d -t $@ - $(call golang-test-all,$@) diff --git a/vendor/github.com/TykTechnologies/leakybucket/README.md b/vendor/github.com/TykTechnologies/leakybucket/README.md deleted file mode 100644 index 741035dee5b..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/README.md +++ /dev/null @@ -1,24 +0,0 @@ -## leakybucket - -Leaky bucket implementation in Go with your choice of data storage layer. - -## Why - -[Leaky buckets](https://en.wikipedia.org/wiki/Leaky_bucket) are useful in a number of settings, especially rate limiting. - -## Documentation - -[![GoDoc](https://godoc.org/github.com/Clever/leakybucket?status.png)](https://godoc.org/github.com/Clever/leakybucket). - -## Tests - -leakybucket is built and tested against Go 1.5. -Ensure this is the version of Go you're running with `go version`. -Make sure your GOPATH is set, e.g. `export GOPATH=~/go`. -Clone the repository to `$GOPATH/src/github.com/Clever/leakybucket`. 
- -If you have done all of the above, then you should be able to run - -``` -make test -``` diff --git a/vendor/github.com/TykTechnologies/leakybucket/bucket.go b/vendor/github.com/TykTechnologies/leakybucket/bucket.go deleted file mode 100644 index 73e5dc36988..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/bucket.go +++ /dev/null @@ -1,40 +0,0 @@ -package leakybucket - -import ( - "errors" - "time" -) - -var ( - // ErrorFull is returned when the amount requested to add exceeds the remaining space in the bucket. - ErrorFull = errors.New("add exceeds free capacity") -) - -// Bucket interface for interacting with leaky buckets: https://en.wikipedia.org/wiki/Leaky_bucket -type Bucket interface { - // Capacity of the bucket. - Capacity() uint - - // Remaining space in the bucket. - Remaining() uint - - // Reset returns when the bucket will be drained. - Reset() time.Time - - // Add to the bucket. Returns bucket state after adding. - Add(uint) (BucketState, error) -} - -// BucketState is a snapshot of a bucket's properties. -type BucketState struct { - Capacity uint - Remaining uint - Reset time.Time -} - -// Storage interface for generating buckets keyed by a string. -type Storage interface { - // Create a bucket with a name, capacity, and rate. - // rate is how long it takes for full capacity to drain. - Create(name string, capacity uint, rate time.Duration) (Bucket, error) -} diff --git a/vendor/github.com/TykTechnologies/leakybucket/doc.go b/vendor/github.com/TykTechnologies/leakybucket/doc.go deleted file mode 100644 index 8182fc70b82..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package leakybucket provides a leaky bucket implementation with support for various backends. -package leakybucket diff --git a/vendor/github.com/TykTechnologies/leakybucket/golang.mk b/vendor/github.com/TykTechnologies/leakybucket/golang.mk deleted file mode 100644 index 8a6e106f646..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/golang.mk +++ /dev/null @@ -1,133 +0,0 @@ -# This is the default Clever Golang Makefile. -# Please do not alter this file directly. -GOLANG_MK_VERSION := 0.1.0 - -SHELL := /bin/bash -.PHONY: golang-godep-vendor golang-test-deps $(GODEP) - -# This block checks and confirms that the proper Go toolchain version is installed. -# arg1: golang version -define golang-version-check -GOVERSION := $(shell go version | grep $(1)) -_ := $(if \ - $(shell go version | grep $(1)), \ - @echo "", \ - $(error "must be running Go version $(1)")) -endef - -export GO15VENDOREXPERIMENT=1 - -# FGT is a utility that exits with 1 whenever any stderr/stdout output is recieved. -FGT := $(GOPATH)/bin/fgt -$(FGT): - go get github.com/GeertJohan/fgt - -# Godep is a tool used to manage Golang dependencies in the style of the Go 1.5 -# vendoring experiment. -GODEP := $(GOPATH)/bin/godep -$(GODEP): - go get -u github.com/tools/godep - -# Golint is a tool for linting Golang code for common errors. -GOLINT := $(GOPATH)/bin/golint -$(GOLINT): - go get github.com/golang/lint/golint - -# golang-vendor-deps installs all dependencies needed for different test cases. -golang-godep-vendor-deps: $(GODEP) - -# golang-godep-vendor is a target for saving dependencies with the godep tool -# to the vendor/ directory. All nested vendor/ directories are deleted as they -# are not handled well by the Go toolchain. 
-# arg1: pkg path -define golang-godep-vendor -$(GODEP) save $(1) -@# remove any nested vendor directories -find vendor/ -path '*/vendor' -type d | xargs -IX rm -r X -endef - -# golang-fmt-deps requires the FGT tool for checking output -golang-fmt-deps: $(FGT) - -# golang-fmt checks that all golang files in the pkg are formatted correctly. -# arg1: pkg path -define golang-fmt -@echo "FORMATTING $(1)..." -@$(FGT) gofmt -l=true $(GOPATH)/src/$(1)/*.go -endef - -# golang-lint-deps requires the golint tool for golang linting. -golang-lint-deps: $(GOLINT) - -# golang-lint calls golint on all golang files in the pkg. -# arg1: pkg path -define golang-lint -@echo "LINTING $(1)..." -@$(GOLINT) $(GOPATH)/src/$(1)/*.go -endef - -# golang-lint-deps-strict requires the golint tool for golang linting. -golang-lint-deps-strict: $(GOLINT) $(FGT) - -# golang-lint-strict calls golint on all golang files in the pkg and fails if any lint -# errors are found. -# arg1: pkg path -define golang-lint-strict -@echo "LINTING $(1)..." -@$(FGT) $(GOLINT) $(GOPATH)/src/$(1)/*.go -endef - -# golang-test-deps is here for consistency -golang-test-deps: - -# golang-test uses the Go toolchain to run all tests in the pkg. -# arg1: pkg path -define golang-test -@echo "TESTING $(1)..." -@go test -v $(1) -endef - -# golang-test-strict-deps is here for consistency -golang-test-strict-deps: - -# golang-test-strict uses the Go toolchain to run all tests in the pkg with the race flag -# arg1: pkg path -define golang-test-strict -@echo "TESTING $(1)..." -@go test -v -race $(1) -endef - -# golang-vet-deps is here for consistency -golang-vet-deps: - -# golang-vet uses the Go toolchain to vet all the pkg for common mistakes. -# arg1: pkg path -define golang-vet -@echo "VETTING $(1)..." -@go vet $(GOPATH)/src/$(1)/*.go -endef - -# golang-test-all-deps installs all dependencies needed for different test cases. -golang-test-all-deps: golang-fmt-deps golang-lint-deps golang-test-deps golang-vet-deps - -# golang-test-all calls fmt, lint, vet and test on the specified pkg. -# arg1: pkg path -define golang-test-all -$(call golang-fmt,$(1)) -$(call golang-lint,$(1)) -$(call golang-vet,$(1)) -$(call golang-test,$(1)) -endef - -# golang-test-all-strict-deps: installs all dependencies needed for different test cases. -golang-test-all-strict-deps: golang-fmt-deps golang-lint-deps-strict golang-test-strict-deps golang-vet-deps - -# golang-test-all-strict calls fmt, lint, vet and test on the specified pkg with strict -# requirements that no errors are thrown while linting. 
-# arg1: pkg path -define golang-test-all-strict -$(call golang-fmt,$(1)) -$(call golang-lint-strict,$(1)) -$(call golang-vet,$(1)) -$(call golang-test-strict,$(1)) -endef diff --git a/vendor/github.com/TykTechnologies/leakybucket/memorycache/cache.go b/vendor/github.com/TykTechnologies/leakybucket/memorycache/cache.go deleted file mode 100644 index 361b97b5e3e..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/memorycache/cache.go +++ /dev/null @@ -1,84 +0,0 @@ -package memorycache - -import ( - "sync" - "time" -) - -// Cache is a synchronised map of items that auto-expire once stale -type Cache struct { - mutex sync.RWMutex - ttl time.Duration - items map[string]*Item -} - -// Set is a thread-safe way to add new items to the map -func (cache *Cache) Set(key string, data *bucket) { - cache.mutex.Lock() - item := &Item{data: data} - item.touch(cache.ttl) - cache.items[key] = item - cache.mutex.Unlock() -} - -// Get is a thread-safe way to lookup items -// Every lookup, also touches the item, hence extending it's life -func (cache *Cache) Get(key string) (data *bucket, found bool) { - cache.mutex.Lock() - item, exists := cache.items[key] - if !exists || item.expired() { - data = &bucket{} - found = false - } else { - item.touch(cache.ttl) - data = item.data - found = true - } - cache.mutex.Unlock() - return -} - -// Count returns the number of items in the cache -// (helpful for tracking memory leaks) -func (cache *Cache) Count() int { - cache.mutex.RLock() - count := len(cache.items) - cache.mutex.RUnlock() - return count -} - -func (cache *Cache) cleanup() { - cache.mutex.Lock() - for key, item := range cache.items { - if item.expired() { - delete(cache.items, key) - } - } - cache.mutex.Unlock() -} - -func (cache *Cache) startCleanupTimer() { - duration := cache.ttl - if duration < time.Second { - duration = time.Second - } - ticker := time.Tick(duration) - go (func() { - for { - select { - case <-ticker: - cache.cleanup() - } - } - })() -} - -// NewCache is a helper to create instance of the Cache struct -func NewCache(duration time.Duration) *Cache { - cache := &Cache{ - ttl: duration, - items: map[string]*Item{}, - } - cache.startCleanupTimer() - return cache -} diff --git a/vendor/github.com/TykTechnologies/leakybucket/memorycache/item.go b/vendor/github.com/TykTechnologies/leakybucket/memorycache/item.go deleted file mode 100644 index f344d6203ff..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/memorycache/item.go +++ /dev/null @@ -1,32 +0,0 @@ -package memorycache - -import ( - "sync" - "time" -) - -// Item represents a record in the cache map -type Item struct { - sync.RWMutex - data *bucket - expires *time.Time -} - -func (item *Item) touch(duration time.Duration) { - item.Lock() - expiration := time.Now().Add(duration) - item.expires = &expiration - item.Unlock() -} - -func (item *Item) expired() bool { - var value bool - item.RLock() - if item.expires == nil { - value = true - } else { - value = item.expires.Before(time.Now()) - } - item.RUnlock() - return value -} diff --git a/vendor/github.com/TykTechnologies/leakybucket/memorycache/memorycache.go b/vendor/github.com/TykTechnologies/leakybucket/memorycache/memorycache.go deleted file mode 100644 index 82477fff067..00000000000 --- a/vendor/github.com/TykTechnologies/leakybucket/memorycache/memorycache.go +++ /dev/null @@ -1,74 +0,0 @@ -package memorycache - -import ( - "sync" - "time" - - "github.com/TykTechnologies/leakybucket" -) - -type bucket struct { - capacity uint - remaining uint - reset 
time.Time - rate time.Duration - mutex sync.Mutex -} - -func (b *bucket) Capacity() uint { - return b.capacity -} - -// Remaining space in the bucket. -func (b *bucket) Remaining() uint { - return b.remaining -} - -// Reset returns when the bucket will be drained. -func (b *bucket) Reset() time.Time { - return b.reset -} - -// Add to the bucket. -func (b *bucket) Add(amount uint) (leakybucket.BucketState, error) { - b.mutex.Lock() - defer b.mutex.Unlock() - if time.Now().After(b.reset) { - b.reset = time.Now().Add(b.rate) - b.remaining = b.capacity - } - if amount > b.remaining { - return leakybucket.BucketState{Capacity: b.capacity, Remaining: b.remaining, Reset: b.reset}, leakybucket.ErrorFull - } - b.remaining -= amount - return leakybucket.BucketState{Capacity: b.capacity, Remaining: b.remaining, Reset: b.reset}, nil -} - -// Storage is a non thread-safe in-memory leaky bucket factory. -type Storage struct { - buckets *Cache -} - -// New initializes the in-memory bucket store. -func New() *Storage { - return &Storage{ - buckets: NewCache(10 * time.Minute), - } -} - -// Create a bucket. -func (s *Storage) Create(name string, capacity uint, rate time.Duration) (leakybucket.Bucket, error) { - b, ok := s.buckets.Get(name) - if ok { - return b, nil - } - - b = &bucket{ - capacity: capacity, - remaining: capacity, - reset: time.Now().Add(rate), - rate: rate, - } - s.buckets.Set(name, b) - return b, nil -} diff --git a/vendor/github.com/TykTechnologies/murmur3/.gitignore b/vendor/github.com/TykTechnologies/murmur3/.gitignore deleted file mode 100644 index 00268614f04..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/vendor/github.com/TykTechnologies/murmur3/.travis.yml b/vendor/github.com/TykTechnologies/murmur3/.travis.yml deleted file mode 100644 index 9bfca9c8b22..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go - -go: - - 1.x - - master - -script: go test diff --git a/vendor/github.com/TykTechnologies/murmur3/LICENSE b/vendor/github.com/TykTechnologies/murmur3/LICENSE deleted file mode 100644 index 2a46fd75007..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -Copyright 2013, Sébastien Paolacci. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of the library nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/TykTechnologies/murmur3/README.md b/vendor/github.com/TykTechnologies/murmur3/README.md deleted file mode 100644 index e463678a05e..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/README.md +++ /dev/null @@ -1,86 +0,0 @@ -murmur3 -======= - -[![Build Status](https://travis-ci.org/spaolacci/murmur3.svg?branch=master)](https://travis-ci.org/spaolacci/murmur3) - -Native Go implementation of Austin Appleby's third MurmurHash revision (aka -MurmurHash3). - -Reference algorithm has been slightly hacked as to support the streaming mode -required by Go's standard [Hash interface](http://golang.org/pkg/hash/#Hash). - - -Benchmarks ----------- - -Go tip as of 2014-06-12 (i.e almost go1.3), core i7 @ 3.4 Ghz. All runs -include hasher instantiation and sequence finalization. - -
-
-Benchmark32_1        500000000     7.69 ns/op      130.00 MB/s
-Benchmark32_2        200000000     8.83 ns/op      226.42 MB/s
-Benchmark32_4        500000000     7.99 ns/op      500.39 MB/s
-Benchmark32_8        200000000     9.47 ns/op      844.69 MB/s
-Benchmark32_16       100000000     12.1 ns/op     1321.61 MB/s
-Benchmark32_32       100000000     18.3 ns/op     1743.93 MB/s
-Benchmark32_64        50000000     30.9 ns/op     2071.64 MB/s
-Benchmark32_128       50000000     57.6 ns/op     2222.96 MB/s
-Benchmark32_256       20000000      116 ns/op     2188.60 MB/s
-Benchmark32_512       10000000      226 ns/op     2260.59 MB/s
-Benchmark32_1024       5000000      452 ns/op     2263.73 MB/s
-Benchmark32_2048       2000000      891 ns/op     2296.02 MB/s
-Benchmark32_4096       1000000     1787 ns/op     2290.92 MB/s
-Benchmark32_8192        500000     3593 ns/op     2279.68 MB/s
-Benchmark128_1       100000000     26.1 ns/op       38.33 MB/s
-Benchmark128_2       100000000     29.0 ns/op       69.07 MB/s
-Benchmark128_4        50000000     29.8 ns/op      134.17 MB/s
-Benchmark128_8        50000000     31.6 ns/op      252.86 MB/s
-Benchmark128_16      100000000     26.5 ns/op      603.42 MB/s
-Benchmark128_32      100000000     28.6 ns/op     1117.15 MB/s
-Benchmark128_64       50000000     35.5 ns/op     1800.97 MB/s
-Benchmark128_128      50000000     50.9 ns/op     2515.50 MB/s
-Benchmark128_256      20000000     76.9 ns/op     3330.11 MB/s
-Benchmark128_512      20000000      135 ns/op     3769.09 MB/s
-Benchmark128_1024     10000000      250 ns/op     4094.38 MB/s
-Benchmark128_2048      5000000      477 ns/op     4290.75 MB/s
-Benchmark128_4096      2000000      940 ns/op     4353.29 MB/s
-Benchmark128_8192      1000000     1838 ns/op     4455.47 MB/s
-
-
- - -
-
-benchmark              Go1.0 MB/s    Go1.1 MB/s  speedup    Go1.2 MB/s  speedup    Go1.3 MB/s  speedup
-Benchmark32_1               98.90        118.59    1.20x        114.79    0.97x        130.00    1.13x
-Benchmark32_2              168.04        213.31    1.27x        210.65    0.99x        226.42    1.07x
-Benchmark32_4              414.01        494.19    1.19x        490.29    0.99x        500.39    1.02x
-Benchmark32_8              662.19        836.09    1.26x        836.46    1.00x        844.69    1.01x
-Benchmark32_16             917.46       1304.62    1.42x       1297.63    0.99x       1321.61    1.02x
-Benchmark32_32            1141.93       1737.54    1.52x       1728.24    0.99x       1743.93    1.01x
-Benchmark32_64            1289.47       2039.51    1.58x       2038.20    1.00x       2071.64    1.02x
-Benchmark32_128           1299.23       2097.63    1.61x       2177.13    1.04x       2222.96    1.02x
-Benchmark32_256           1369.90       2202.34    1.61x       2213.15    1.00x       2188.60    0.99x
-Benchmark32_512           1399.56       2255.72    1.61x       2264.49    1.00x       2260.59    1.00x
-Benchmark32_1024          1410.90       2285.82    1.62x       2270.99    0.99x       2263.73    1.00x
-Benchmark32_2048          1422.14       2297.62    1.62x       2269.59    0.99x       2296.02    1.01x
-Benchmark32_4096          1420.53       2307.81    1.62x       2273.43    0.99x       2290.92    1.01x
-Benchmark32_8192          1424.79       2312.87    1.62x       2286.07    0.99x       2279.68    1.00x
-Benchmark128_1               8.32         30.15    3.62x         30.84    1.02x         38.33    1.24x
-Benchmark128_2              16.38         59.72    3.65x         59.37    0.99x         69.07    1.16x
-Benchmark128_4              32.26        112.96    3.50x        114.24    1.01x        134.17    1.17x
-Benchmark128_8              62.68        217.88    3.48x        218.18    1.00x        252.86    1.16x
-Benchmark128_16            128.47        451.57    3.51x        474.65    1.05x        603.42    1.27x
-Benchmark128_32            246.18        910.42    3.70x        871.06    0.96x       1117.15    1.28x
-Benchmark128_64            449.05       1477.64    3.29x       1449.24    0.98x       1800.97    1.24x
-Benchmark128_128           762.61       2222.42    2.91x       2217.30    1.00x       2515.50    1.13x
-Benchmark128_256          1179.92       3005.46    2.55x       2931.55    0.98x       3330.11    1.14x
-Benchmark128_512          1616.51       3590.75    2.22x       3592.08    1.00x       3769.09    1.05x
-Benchmark128_1024         1964.36       3979.67    2.03x       4034.01    1.01x       4094.38    1.01x
-Benchmark128_2048         2225.07       4156.93    1.87x       4244.17    1.02x       4290.75    1.01x
-Benchmark128_4096         2360.15       4299.09    1.82x       4392.35    1.02x       4353.29    0.99x
-Benchmark128_8192         2411.50       4356.84    1.81x       4480.68    1.03x       4455.47    0.99x
-
-
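For reference, a small sketch of the hashing API whose sources are deleted in the following hunks (`Sum32`, `Sum64`, `Sum128`, and the streaming constructors from `murmur32.go`, `murmur64.go`, and `murmur128.go`); the import path is the vendored one removed by this PR and the input bytes are illustrative:

```go
package main

import (
	"fmt"

	"github.com/TykTechnologies/murmur3"
)

func main() {
	data := []byte("hello, murmur3")

	// One-shot helpers.
	fmt.Println(murmur3.Sum32(data))
	fmt.Println(murmur3.Sum64(data))
	h1, h2 := murmur3.Sum128(data)
	fmt.Println(h1, h2)

	// Streaming mode through Go's standard hash interfaces.
	h := murmur3.New128()
	h.Write(data[:5])
	h.Write(data[5:])
	fmt.Printf("%x\n", h.Sum(nil))
}
```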
- diff --git a/vendor/github.com/TykTechnologies/murmur3/murmur.go b/vendor/github.com/TykTechnologies/murmur3/murmur.go deleted file mode 100644 index 1252cf73a79..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/murmur.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2013, Sébastien Paolacci. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package murmur3 implements Austin Appleby's non-cryptographic MurmurHash3. - - Reference implementation: - http://code.google.com/p/smhasher/wiki/MurmurHash3 - - History, characteristics and (legacy) perfs: - https://sites.google.com/site/murmurhash/ - https://sites.google.com/site/murmurhash/statistics -*/ -package murmur3 - -type bmixer interface { - bmix(p []byte) (tail []byte) - Size() (n int) - reset() -} - -type digest struct { - clen int // Digested input cumulative length. - tail []byte // 0 to Size()-1 bytes view of `buf'. - buf [16]byte // Expected (but not required) to be Size() large. - seed uint32 // Seed for initializing the hash. - bmixer -} - -func (d *digest) BlockSize() int { return 1 } - -func (d *digest) Write(p []byte) (n int, err error) { - n = len(p) - d.clen += n - - if len(d.tail) > 0 { - // Stick back pending bytes. - nfree := d.Size() - len(d.tail) // nfree ∈ [1, d.Size()-1]. - if nfree < len(p) { - // One full block can be formed. - block := append(d.tail, p[:nfree]...) - p = p[nfree:] - _ = d.bmix(block) // No tail. - } else { - // Tail's buf is large enough to prevent reallocs. - p = append(d.tail, p...) - } - } - - d.tail = d.bmix(p) - - // Keep own copy of the 0 to Size()-1 pending bytes. - nn := copy(d.buf[:], d.tail) - d.tail = d.buf[:nn] - - return n, nil -} - -func (d *digest) Reset() { - d.clen = 0 - d.tail = nil - d.bmixer.reset() -} diff --git a/vendor/github.com/TykTechnologies/murmur3/murmur128.go b/vendor/github.com/TykTechnologies/murmur3/murmur128.go deleted file mode 100644 index a4b618b5f3d..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/murmur128.go +++ /dev/null @@ -1,203 +0,0 @@ -package murmur3 - -import ( - //"encoding/binary" - "hash" - "unsafe" -) - -const ( - c1_128 = 0x87c37b91114253d5 - c2_128 = 0x4cf5ad432745937f -) - -// Make sure interfaces are correctly implemented. -var ( - _ hash.Hash = new(digest128) - _ Hash128 = new(digest128) - _ bmixer = new(digest128) -) - -// Hash128 represents a 128-bit hasher -// Hack: the standard api doesn't define any Hash128 interface. -type Hash128 interface { - hash.Hash - Sum128() (uint64, uint64) -} - -// digest128 represents a partial evaluation of a 128 bites hash. -type digest128 struct { - digest - h1 uint64 // Unfinalized running hash part 1. - h2 uint64 // Unfinalized running hash part 2. 
-} - -// New128 returns a 128-bit hasher -func New128() Hash128 { return New128WithSeed(0) } - -// New128WithSeed returns a 128-bit hasher set with explicit seed value -func New128WithSeed(seed uint32) Hash128 { - d := new(digest128) - d.seed = seed - d.bmixer = d - d.Reset() - return d -} - -func (d *digest128) Size() int { return 16 } - -func (d *digest128) reset() { d.h1, d.h2 = uint64(d.seed), uint64(d.seed) } - -func (d *digest128) Sum(b []byte) []byte { - h1, h2 := d.Sum128() - return append(b, - byte(h1>>56), byte(h1>>48), byte(h1>>40), byte(h1>>32), - byte(h1>>24), byte(h1>>16), byte(h1>>8), byte(h1), - - byte(h2>>56), byte(h2>>48), byte(h2>>40), byte(h2>>32), - byte(h2>>24), byte(h2>>16), byte(h2>>8), byte(h2), - ) -} - -func (d *digest128) bmix(p []byte) (tail []byte) { - h1, h2 := d.h1, d.h2 - - nblocks := len(p) / 16 - for i := 0; i < nblocks; i++ { - t := (*[2]uint64)(unsafe.Pointer(&p[i*16])) - k1, k2 := t[0], t[1] - - k1 *= c1_128 - k1 = (k1 << 31) | (k1 >> 33) // rotl64(k1, 31) - k1 *= c2_128 - h1 ^= k1 - - h1 = (h1 << 27) | (h1 >> 37) // rotl64(h1, 27) - h1 += h2 - h1 = h1*5 + 0x52dce729 - - k2 *= c2_128 - k2 = (k2 << 33) | (k2 >> 31) // rotl64(k2, 33) - k2 *= c1_128 - h2 ^= k2 - - h2 = (h2 << 31) | (h2 >> 33) // rotl64(h2, 31) - h2 += h1 - h2 = h2*5 + 0x38495ab5 - } - d.h1, d.h2 = h1, h2 - return p[nblocks*d.Size():] -} - -func (d *digest128) Sum128() (h1, h2 uint64) { - - h1, h2 = d.h1, d.h2 - - var k1, k2 uint64 - switch len(d.tail) & 15 { - case 15: - k2 ^= uint64(d.tail[14]) << 48 - fallthrough - case 14: - k2 ^= uint64(d.tail[13]) << 40 - fallthrough - case 13: - k2 ^= uint64(d.tail[12]) << 32 - fallthrough - case 12: - k2 ^= uint64(d.tail[11]) << 24 - fallthrough - case 11: - k2 ^= uint64(d.tail[10]) << 16 - fallthrough - case 10: - k2 ^= uint64(d.tail[9]) << 8 - fallthrough - case 9: - k2 ^= uint64(d.tail[8]) << 0 - - k2 *= c2_128 - k2 = (k2 << 33) | (k2 >> 31) // rotl64(k2, 33) - k2 *= c1_128 - h2 ^= k2 - - fallthrough - - case 8: - k1 ^= uint64(d.tail[7]) << 56 - fallthrough - case 7: - k1 ^= uint64(d.tail[6]) << 48 - fallthrough - case 6: - k1 ^= uint64(d.tail[5]) << 40 - fallthrough - case 5: - k1 ^= uint64(d.tail[4]) << 32 - fallthrough - case 4: - k1 ^= uint64(d.tail[3]) << 24 - fallthrough - case 3: - k1 ^= uint64(d.tail[2]) << 16 - fallthrough - case 2: - k1 ^= uint64(d.tail[1]) << 8 - fallthrough - case 1: - k1 ^= uint64(d.tail[0]) << 0 - k1 *= c1_128 - k1 = (k1 << 31) | (k1 >> 33) // rotl64(k1, 31) - k1 *= c2_128 - h1 ^= k1 - } - - h1 ^= uint64(d.clen) - h2 ^= uint64(d.clen) - - h1 += h2 - h2 += h1 - - h1 = fmix64(h1) - h2 = fmix64(h2) - - h1 += h2 - h2 += h1 - - return h1, h2 -} - -func fmix64(k uint64) uint64 { - k ^= k >> 33 - k *= 0xff51afd7ed558ccd - k ^= k >> 33 - k *= 0xc4ceb9fe1a85ec53 - k ^= k >> 33 - return k -} - -/* -func rotl64(x uint64, r byte) uint64 { - return (x << r) | (x >> (64 - r)) -} -*/ - -// Sum128 returns the MurmurHash3 sum of data. It is equivalent to the -// following sequence (without the extra burden and the extra allocation): -// hasher := New128() -// hasher.Write(data) -// return hasher.Sum128() -func Sum128(data []byte) (h1 uint64, h2 uint64) { return Sum128WithSeed(data, 0) } - -// Sum128WithSeed returns the MurmurHash3 sum of data. 
It is equivalent to the -// following sequence (without the extra burden and the extra allocation): -// hasher := New128WithSeed(seed) -// hasher.Write(data) -// return hasher.Sum128() -func Sum128WithSeed(data []byte, seed uint32) (h1 uint64, h2 uint64) { - d := &digest128{h1: uint64(seed), h2: uint64(seed)} - d.seed = seed - d.tail = d.bmix(data) - d.clen = len(data) - return d.Sum128() -} diff --git a/vendor/github.com/TykTechnologies/murmur3/murmur32.go b/vendor/github.com/TykTechnologies/murmur3/murmur32.go deleted file mode 100644 index bc89d268a3c..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/murmur32.go +++ /dev/null @@ -1,154 +0,0 @@ -package murmur3 - -// http://code.google.com/p/guava-libraries/source/browse/guava/src/com/google/common/hash/Murmur3_32HashFunction.java - -import ( - "hash" - "unsafe" -) - -// Make sure interfaces are correctly implemented. -var ( - _ hash.Hash = new(digest32) - _ hash.Hash32 = new(digest32) -) - -const ( - c1_32 uint32 = 0xcc9e2d51 - c2_32 uint32 = 0x1b873593 -) - -// digest32 represents a partial evaluation of a 32 bites hash. -type digest32 struct { - digest - h1 uint32 // Unfinalized running hash. -} - -func New32() hash.Hash32 { - d := new(digest32) - d.bmixer = d - d.Reset() - return d -} - -func (d *digest32) Size() int { return 4 } - -func (d *digest32) reset() { d.h1 = 0 } - -func (d *digest32) Sum(b []byte) []byte { - h := d.h1 - return append(b, byte(h>>24), byte(h>>16), byte(h>>8), byte(h)) -} - -// Digest as many blocks as possible. -func (d *digest32) bmix(p []byte) (tail []byte) { - h1 := d.h1 - - nblocks := len(p) / 4 - for i := 0; i < nblocks; i++ { - k1 := *(*uint32)(unsafe.Pointer(&p[i*4])) - - k1 *= c1_32 - k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15) - k1 *= c2_32 - - h1 ^= k1 - h1 = (h1 << 13) | (h1 >> 19) // rotl32(h1, 13) - h1 = h1*5 + 0xe6546b64 - } - d.h1 = h1 - return p[nblocks*d.Size():] -} - -func (d *digest32) Sum32() (h1 uint32) { - - h1 = d.h1 - - var k1 uint32 - switch len(d.tail) & 3 { - case 3: - k1 ^= uint32(d.tail[2]) << 16 - fallthrough - case 2: - k1 ^= uint32(d.tail[1]) << 8 - fallthrough - case 1: - k1 ^= uint32(d.tail[0]) - k1 *= c1_32 - k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15) - k1 *= c2_32 - h1 ^= k1 - } - - h1 ^= uint32(d.clen) - - h1 ^= h1 >> 16 - h1 *= 0x85ebca6b - h1 ^= h1 >> 13 - h1 *= 0xc2b2ae35 - h1 ^= h1 >> 16 - - return h1 -} - -/* -func rotl32(x uint32, r byte) uint32 { - return (x << r) | (x >> (32 - r)) -} -*/ - -// Sum32 returns the MurmurHash3 sum of data. 
It is equivalent to the -// following sequence (without the extra burden and the extra allocation): -// hasher := New32() -// hasher.Write(data) -// return hasher.Sum32() -func Sum32(data []byte) uint32 { - - var h1 uint32 = 0 - - nblocks := len(data) / 4 - var p uintptr - if len(data) > 0 { - p = uintptr(unsafe.Pointer(&data[0])) - } - p1 := p + uintptr(4*nblocks) - for ; p < p1; p += 4 { - k1 := *(*uint32)(unsafe.Pointer(p)) - - k1 *= c1_32 - k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15) - k1 *= c2_32 - - h1 ^= k1 - h1 = (h1 << 13) | (h1 >> 19) // rotl32(h1, 13) - h1 = h1*5 + 0xe6546b64 - } - - tail := data[nblocks*4:] - - var k1 uint32 - switch len(tail) & 3 { - case 3: - k1 ^= uint32(tail[2]) << 16 - fallthrough - case 2: - k1 ^= uint32(tail[1]) << 8 - fallthrough - case 1: - k1 ^= uint32(tail[0]) - k1 *= c1_32 - k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15) - k1 *= c2_32 - h1 ^= k1 - } - - h1 ^= uint32(len(data)) - - h1 ^= h1 >> 16 - h1 *= 0x85ebca6b - h1 ^= h1 >> 13 - h1 *= 0xc2b2ae35 - h1 ^= h1 >> 16 - - return h1 -} diff --git a/vendor/github.com/TykTechnologies/murmur3/murmur64.go b/vendor/github.com/TykTechnologies/murmur3/murmur64.go deleted file mode 100644 index 65a410ae0b9..00000000000 --- a/vendor/github.com/TykTechnologies/murmur3/murmur64.go +++ /dev/null @@ -1,57 +0,0 @@ -package murmur3 - -import ( - "hash" -) - -// Make sure interfaces are correctly implemented. -var ( - _ hash.Hash = new(digest64) - _ hash.Hash64 = new(digest64) - _ bmixer = new(digest64) -) - -// digest64 is half a digest128. -type digest64 digest128 - -// New64 returns a 64-bit hasher -func New64() hash.Hash64 { return New64WithSeed(0) } - -// New64WithSeed returns a 64-bit hasher set with explicit seed value -func New64WithSeed(seed uint32) hash.Hash64 { - d := (*digest64)(New128WithSeed(seed).(*digest128)) - return d -} - -func (d *digest64) Sum(b []byte) []byte { - h1 := d.Sum64() - return append(b, - byte(h1>>56), byte(h1>>48), byte(h1>>40), byte(h1>>32), - byte(h1>>24), byte(h1>>16), byte(h1>>8), byte(h1)) -} - -func (d *digest64) Sum64() uint64 { - h1, _ := (*digest128)(d).Sum128() - return h1 -} - -// Sum64 returns the MurmurHash3 sum of data. It is equivalent to the -// following sequence (without the extra burden and the extra allocation): -// hasher := New64() -// hasher.Write(data) -// return hasher.Sum64() -func Sum64(data []byte) uint64 { return Sum64WithSeed(data, 0) } - -// Sum64WithSeed returns the MurmurHash3 sum of data. 
It is equivalent to the -// following sequence (without the extra burden and the extra allocation): -// hasher := New64WithSeed(seed) -// hasher.Write(data) -// return hasher.Sum64() -func Sum64WithSeed(data []byte, seed uint32) uint64 { - d := &digest128{h1: uint64(seed), h2: uint64(seed)} - d.seed = seed - d.tail = d.bmix(data) - d.clen = len(data) - h1, _ := d.Sum128() - return h1 -} diff --git a/vendor/github.com/TykTechnologies/openid2go/LICENSE b/vendor/github.com/TykTechnologies/openid2go/LICENSE deleted file mode 100644 index a39cc60e8a6..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Emanoel Xavier - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/LICENSE b/vendor/github.com/TykTechnologies/openid2go/openid/LICENSE deleted file mode 100644 index a39cc60e8a6..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Emanoel Xavier - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
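The `murmur64.go` and `murmur128.go` files deleted a few hunks above also expose seeded variants of these helpers; a short sketch, again assuming the vendored import path and an arbitrary seed value:

```go
package main

import (
	"fmt"

	"github.com/TykTechnologies/murmur3"
)

func main() {
	data := []byte("payload")
	const seed uint32 = 42

	// One-shot seeded helpers.
	fmt.Println(murmur3.Sum64WithSeed(data, seed))
	h1, h2 := murmur3.Sum128WithSeed(data, seed)
	fmt.Println(h1, h2)

	// Streaming variant carrying the same seed.
	h := murmur3.New64WithSeed(seed)
	h.Write(data)
	fmt.Println(h.Sum64())
}
```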
diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/README.md b/vendor/github.com/TykTechnologies/openid2go/openid/README.md deleted file mode 100644 index c31447a9ad8..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/README.md +++ /dev/null @@ -1,86 +0,0 @@ -Go OpenId -=========== -[![godoc](http://img.shields.io/badge/godoc-reference-blue.svg?style=flat)](https://godoc.org/github.com/emanoelxavier/openid2go/openid) -[![license](http://img.shields.io/badge/license-MIT-yellowgreen.svg?style=flat)](https://raw.githubusercontent.com/emanoelxavier/openid2go/master/openid/LICENSE) -## Summary - -A Go package that implements web service middlewares for authenticating identities represented by OpenID Connect (OIDC) ID Tokens. - -"OpenID Connect 1.0 is a simple identity layer on top of the OAuth 2.0 protocol. It enables Clients to verify the identity of the End-User based on the authentication performed by an Authorization Server" - [OpenID Connect](http://openid.net/specs/openid-connect-core-1_0.html) - -## Installation - -go get github.com/emanoelxavier/openid2go/openid - -## Example -This example demonstrates how to use this package to validate incoming ID Tokens. It initializes the Configuration with the desired providers (OPs) and registers two middlewares: openid.Authenticate and openid.AuthenticateUser. The former performs the token validation while the latter, in addition to that, will forward the user information to the next handler. - -```go -import ( - "fmt" - "net/http" - - "github.com/emanoelxavier/openid2go/openid" -) - -func AuthenticatedHandler(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, "The user was authenticated!") -} - -func AuthenticatedHandlerWithUser(u *openid.User, w http.ResponseWriter, r *http.Request) { - fmt.Fprintf(w, "The user was authenticated! The token was issued by %v and the user is %+v.", u.Issuer, u) -} - -func Example() { - configuration, err := openid.NewConfiguration(openid.ProvidersGetter(getProviders_googlePlayground)) - - if err != nil { - panic(err) - } - - http.Handle("/user", openid.AuthenticateUser(configuration, openid.UserHandlerFunc(AuthenticatedHandlerWithUser))) - http.Handle("/authn", openid.Authenticate(configuration, http.HandlerFunc(AuthenticatedHandler))) - - http.ListenAndServe(":5100", nil) -} - -func myGetProviders() ([]openid.Provider, error) { - provider, err := openid.NewProvider("https://providerissuer", []string{"myClientID"}) - - if err != nil { - return nil, err - } - - return []openid.Provider{provider}, nil -} -``` -This example is also available in the documentation of this package, for more details see [GoDoc](https://godoc.org/github.com/emanoelxavier/openid2go/openid). - -## Tests - -#### Unit Tests -```sh -go test github.com/emanoelxavier/openid2go/openid -``` - -#### Integration Tests -In addition to to unit tests, this package also comes with integration tests that will validate real ID Tokens issued by real OIDC providers. The following command will run those tests: - -```sh -go test -tags integration github.com/emanoelxavier/openid2go/openid -issuer=[issuer] -clientID=[clientID] -idToken=[idToken] -``` - -Replace [issuer], [clientID] and [idToken] with the information from an identity provider of your choice. - -For a quick spin you can use it with tokens issued by Google for the [Google OAuth PlayGround](https://developers.google.com/oauthplayground) entering "openid" (without quotes) within the scope field and copying the issued ID Token. 
For this provider and client the values will be: - -```sh -go test -tags integration github.com/emanoelxavier/openid2go/openid -issuer=https://accounts.google.com -clientID=407408718192.apps.googleusercontent.com -idToken=copiedIDToken -``` - -## Contributing - -1. Open an issue if found a bug or have a functional request. -2. Disccuss. -3. Branch off, write the fix with test(s) and commit attaching to the issue. -4. Make a pull request. \ No newline at end of file diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/configuration.go b/vendor/github.com/TykTechnologies/openid2go/openid/configuration.go deleted file mode 100644 index b49c90d0184..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/configuration.go +++ /dev/null @@ -1,6 +0,0 @@ -package openid - -type configuration struct { - Issuer string `json:"issuer"` - JwksUri string `json:"jwks_uri"` -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/configurationprovider.go b/vendor/github.com/TykTechnologies/openid2go/openid/configurationprovider.go deleted file mode 100644 index 66a846af717..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/configurationprovider.go +++ /dev/null @@ -1,54 +0,0 @@ -package openid - -import ( - "encoding/json" - "fmt" - "io" - "net/http" - "strings" -) - -const wellKnownOpenIdConfiguration = "/.well-known/openid-configuration" - -type httpGetFunc func(url string) (*http.Response, error) -type decodeResponseFunc func(io.Reader, interface{}) error - -type configurationGetter interface { // Getter - getConfiguration(string) (configuration, error) -} - -type httpConfigurationProvider struct { //configurationProvider - getConfig httpGetFunc //httpGetter - decodeConfig decodeResponseFunc //responseDecoder -} - -func newHTTPConfigurationProvider(gc httpGetFunc, dc decodeResponseFunc) *httpConfigurationProvider { - return &httpConfigurationProvider{gc, dc} -} - -func jsonDecodeResponse(r io.Reader, v interface{}) error { - return json.NewDecoder(r).Decode(v) -} - -func (httpProv *httpConfigurationProvider) getConfiguration(issuer string) (configuration, error) { - // Workaround for tokens issued by google - if issuer == "accounts.google.com" { - issuer = "https://" + issuer - } - - configurationUri := strings.TrimSuffix(issuer, "/") + wellKnownOpenIdConfiguration - var config configuration - resp, err := httpProv.getConfig(configurationUri) - if err != nil { - return config, &ValidationError{Code: ValidationErrorGetOpenIdConfigurationFailure, Message: fmt.Sprintf("Failure while contacting the configuration endpoint %v.", configurationUri), Err: err, HTTPStatus: http.StatusUnauthorized} - } - - defer resp.Body.Close() - - if err := httpProv.decodeConfig(resp.Body, &config); err != nil { - return config, &ValidationError{Code: ValidationErrorDecodeOpenIdConfigurationFailure, Message: fmt.Sprintf("Failure while decoding the configuration retrived from endpoint %v.", configurationUri), Err: err, HTTPStatus: http.StatusUnauthorized} - } - - return config, nil - -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/doc.go b/vendor/github.com/TykTechnologies/openid2go/openid/doc.go deleted file mode 100644 index 244eab61f60..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/doc.go +++ /dev/null @@ -1,129 +0,0 @@ -/*Package openid implements web service middlewares for authenticating identities represented by -OpenID Connect (OIDC) ID Tokens. 
-For details on OIDC go to http://openid.net/specs/openid-connect-core-1_0.html - -The middlewares will: extract the ID Token from the request; retrieve the OIDC provider (OP) -configuration and signing keys; validate the token and provide the user identity and claims to the -underlying web service. - -The Basics - -At the core of this package are the Authenticate and AuthenticateUser middlewares. To use either one -of them you will need an instance of the Configuration type, to create that you use NewConfiguration. - - func Authenticate(conf *Configuration, h http.Handler) http.Handler - func AuthenticateUser(conf *Configuration, h UserHandler) http.Handler - NewConfiguration(options ...option) (*Configuration, error) - - // options: - - func ErrorHandler(eh ErrorHandlerFunc) func(*Configuration) error - func ProvidersGetter(pg GetProvidersFunc) func(*Configuration) error - - // extension points: - - type ErrorHandlerFunc func(error, http.ResponseWriter, *http.Request) bool - type GetProvidersFunc func() ([]Provider, error) - -The Example below demonstrates these elements working together. - -Token Parsing - -Both Authenticate and AuthenticateUser middlewares expect the incoming requests to have an HTTP -Authorization header with the content 'Bearer [idToken]' where [idToken] is a valid ID Token issued by -an OP. For instance: - - Authorization: Bearer eyJhbGciOiJSUzI1NiIsImtpZCI6... - -By default, requests that do not contain an Authorization header with this content will not be forwarded -to the next HTTP handler in the pipeline, instead they will fail back to the client with HTTP status -400/Bad Request. - -Token Validation - -Once parsed the ID Token will be validated: - - 1) Is the token a valid jwt? - 2) Is the token issued by a known OP? - 3) Is the token issued for a known client? - 4) Is the token valid at the time ('not use before' and 'expire at' claims)? - 5) Is the token signed accordingly? - -The signature validation is done with the public keys retrieved from the jwks_uri published by the OP in -its OIDC metadata (https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). - -The token's issuer and audiences will be verified using a collection of the type Provider. This -collection is retrieved by calling the implementation of the function GetProvidersFunc registered with -the Configuration. -If the token issuer matches the Issuer of any of the providers and the token audience matches at least -one of the ClientIDs of the respective provider then the token is considered valid. - - func myGetProviders() ([]openid.Provider, error) { - p, err := openid.NewProvider("https://accounts.google.com", - []string{"407408718192.apps.googleusercontent.com"}) - // .... - return []openid.Provider{p}, nil - } - - c, _ := openid.NewConfiguration(openid.ProvidersGetter(myGetProviders)) - -In code above only tokens with Issuer claim ('iss') https://accounts.google.com and Audiences claim -('aud') containing "407408718192.apps.googleusercontent.com" can be valid. - -By default, when the token validation fails for any reason the requests will not be forwarded to the next -handler in the pipeline, instead they will fail back to the client with HTTP status 401/Unauthorized. 
- -Error Handling - -The default behavior of the Authenticate and AuthenticateUser middlewares upon error conditions is: -the execution pipeline is stopped (the next handler will not be executed), the response will contain -status 400 when a token is not found and 401 when it is invalid, and the response will also contain the -error message. -This behavior can be changed by implementing a function of type ErrorHandlerFunc and registering it -using ErrorHandler with the Configuration. - - type ErrorHandlerFunc func(error, http.ResponseWriter, *http.Request) bool - func ErrorHandler(eh ErrorHandlerFunc) func(*Configuration) error - -For instance: - - func myErrorHandler(e error, w http.ResponseWriter, r *http.Request) bool { - fmt.Fprintf(w, e.Error()) - return false - } - - c, _ := openid.NewConfiguration(openid.ProvidersGetter(myGetProviders), - openid.ErrorHandler(myErrorHandler)) - -In the code above myErrorHandler adds the error message to the response and let the execution -continue to the next handler in the pipeline (returning false) for all error types. -You can use this extension point to fine tune what happens when a specific error is returned by your -implementation of the GetProvidersFunc or even for the error types and codes exported by this -package: - - type ValidationError struct - type ValidationErrorCode uint32 - type SetupError struct - type SetupErrorCode uint32 - -Authenticate vs AuthenticateUser - -Both middlewares Authenticate and AuthenticateUser behave exactly the same way when it comes to -parsing and validating the ID Token. The only difference is that AuthenticateUser will forward the -information about the user's identity from the ID Token to the next handler in the pipeline. -If your service does not need to know the identity of the authenticated user then Authenticate will -suffice, otherwise your choice is AuthenticateUser. -In order to receive the User information from the AuthenticateUser the next handler in the pipeline -must implement the interface UserHandler with the following function: - - ServeHTTPWithUser(*User, http.ResponseWriter, *http.Request) - -You can also make use of the function adapter UserHandlerFunc as shown in the example below: - - func myHandlerWithUser(u *openid.User, w http.ResponseWriter, r *http.Request) { - fmt.Fprintf(w, "Authenticated! The user is %+v.", u) - } - - http.Handle("/user", openid.AuthenticateUser(c, openid.UserHandlerFunc(myHandlerWithUser))) -*/ -package openid diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/errors.go b/vendor/github.com/TykTechnologies/openid2go/openid/errors.go deleted file mode 100644 index 0a495c92a58..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/errors.go +++ /dev/null @@ -1,122 +0,0 @@ -package openid - -import ( - "fmt" - "net/http" - - "github.com/dgrijalva/jwt-go" -) - -// SetupErrorCode is the type of error code that can -// be returned by the operations done during middleware setup. -type SetupErrorCode uint32 - -// Setup error constants. -const ( - SetupErrorInvalidIssuer SetupErrorCode = iota // Invalid issuer provided during setup. - SetupErrorInvalidClientIDs // Invalid client id collection provided during setup. - SetupErrorEmptyProviderCollection // Empty collection of providers provided during setup. -) - -// ValidationErrorCode is the type of error code that can -// be returned by the operations done during token validation. -type ValidationErrorCode uint32 - -// Validation error constants. 
-const ( - ValidationErrorAuthorizationHeaderNotFound ValidationErrorCode = iota // Authorization header not found on request. - ValidationErrorAuthorizationHeaderWrongFormat // Authorization header unexpected format. - ValidationErrorAuthorizationHeaderWrongSchemeName // Authorization header unexpected scheme. - ValidationErrorJwtValidationFailure // Jwt token validation failed with a known error. - ValidationErrorJwtValidationUnknownFailure // Jwt token validation failed with an unknown error. - ValidationErrorInvalidAudienceType // Unexpected token audience type. - ValidationErrorInvalidAudience // Unexpected token audience content. - ValidationErrorAudienceNotFound // Unexpected token audience value. Audience not registered. - ValidationErrorInvalidIssuerType // Unexpected token issuer type. - ValidationErrorInvalidIssuer // Unexpected token issuer content. - ValidationErrorIssuerNotFound // Unexpected token value. Issuer not registered. - ValidationErrorGetOpenIdConfigurationFailure // Failure while retrieving the OIDC configuration. - ValidationErrorDecodeOpenIdConfigurationFailure // Failure while decoding the OIDC configuration. - ValidationErrorGetJwksFailure // Failure while retrieving jwk set. - ValidationErrorDecodeJwksFailure // Failure while decoding the jwk set. - ValidationErrorEmptyJwk // Empty jwk returned. - ValidationErrorEmptyJwkKey // Empty jwk key set returned. - ValidationErrorMarshallingKey // Error while marshalling the signing key. - ValidationErrorKidNotFound // Key identifier not found. - ValidationErrorInvalidSubjectType // Unexpected token subject type. - ValidationErrorInvalidSubject // Unexpected token subject content. - ValidationErrorSubjectNotFound // Token missing the 'sub' claim. - ValidationErrorIdTokenEmpty // Empty ID token. - ValidationErrorEmptyProviders // Empty collection of providers. -) - -const setupErrorMessagePrefix string = "Setup Error." -const validationErrorMessagePrefix string = "Validation Error." - -// SetupError represents the error returned by operations called during -// middleware setup. -type SetupError struct { - Err error - Code SetupErrorCode - Message string -} - -// Error returns a formatted string containing the error Message. -func (se SetupError) Error() string { - return fmt.Sprintf("Setup error. %v", se.Message) -} - -// ValidationError represents the error returned by operations called during -// token validation. -type ValidationError struct { - Err error - Code ValidationErrorCode - Message string - HTTPStatus int -} - -// The ErrorHandlerFunc represents the function used to handle errors during token -// validation. Applications can have their own implementation of this function and -// register it using the ErrorHandler option. Through this extension point applications -// can choose what to do upon different error types, for instance return an certain HTTP Status code -// and/or include some detailed message in the response. -// This function returns false if the next handler registered after the ID Token validation -// should be executed when an error is found or true if the execution should be stopped. -type ErrorHandlerFunc func(error, http.ResponseWriter, *http.Request) bool - -// Error returns a formatted string containing the error Message. -func (ve ValidationError) Error() string { - return fmt.Sprintf("Validation error. 
%v", ve.Message) -} - -// jwtErrorToOpenIdError converts errors of the type *jwt.ValidationError returned during token validation into errors of type *ValidationError -func jwtErrorToOpenIdError(e error) *ValidationError { - if jwtError, ok := e.(*jwt.ValidationError); ok { - if (jwtError.Errors & (jwt.ValidationErrorNotValidYet | jwt.ValidationErrorExpired | jwt.ValidationErrorSignatureInvalid)) != 0 { - return &ValidationError{Code: ValidationErrorJwtValidationFailure, Message: "Jwt token validation failed.", HTTPStatus: http.StatusUnauthorized} - } - - if (jwtError.Errors & jwt.ValidationErrorMalformed) != 0 { - return &ValidationError{Code: ValidationErrorJwtValidationFailure, Message: "Jwt token validation failed.", HTTPStatus: http.StatusBadRequest} - } - - if (jwtError.Errors & jwt.ValidationErrorUnverifiable) != 0 { - // TODO: improve this once https://github.com/dgrijalva/jwt-go/issues/108 is resolved. - // Currently jwt.Parse does not surface errors returned by the KeyFunc. - return &ValidationError{Code: ValidationErrorJwtValidationFailure, Message: jwtError.Error(), HTTPStatus: http.StatusUnauthorized} - } - } - - return &ValidationError{Code: ValidationErrorJwtValidationUnknownFailure, Message: "Jwt token validation failed with unknown error.", HTTPStatus: http.StatusInternalServerError} -} - -func validationErrorToHTTPStatus(e error, rw http.ResponseWriter, req *http.Request) (halt bool) { - if verr, ok := e.(*ValidationError); ok { - http.Error(rw, verr.Message, verr.HTTPStatus) - } else { - rw.WriteHeader(http.StatusInternalServerError) - fmt.Fprintf(rw, e.Error()) - } - - return true -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/idtokenvalidator.go b/vendor/github.com/TykTechnologies/openid2go/openid/idtokenvalidator.go deleted file mode 100644 index 198bde696df..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/idtokenvalidator.go +++ /dev/null @@ -1,200 +0,0 @@ -package openid - -import ( - "fmt" - "net/http" - - "github.com/dgrijalva/jwt-go" -) - -const issuerClaimName = "iss" -const audiencesClaimName = "aud" -const subjectClaimName = "sub" -const keyIDJwtHeaderName = "kid" - -type JWTTokenValidator interface { - Validate(t string) (jt *jwt.Token, err error) -} - -type jwtParserFunc func(string, jwt.Keyfunc) (*jwt.Token, error) - -type idTokenValidator struct { - provGetter GetProvidersFunc - jwtParser jwtParserFunc - keyGetter signingKeyGetter -} - -func newIDTokenValidator(pg GetProvidersFunc, jp jwtParserFunc, kg signingKeyGetter) *idTokenValidator { - return &idTokenValidator{pg, jp, kg} -} - -func (tv *idTokenValidator) Validate(t string) (*jwt.Token, error) { - jt, err := tv.jwtParser(t, tv.getSigningKey) - if err != nil { - if verr, ok := err.(*jwt.ValidationError); ok { - // If the signing key did not match it may be because the in memory key is outdated. - // Renew the cached signing key. - if (verr.Errors & jwt.ValidationErrorSignatureInvalid) != 0 { - jt, err = tv.jwtParser(t, tv.renewAndGetSigningKey) - } - } - } - - if err != nil { - return nil, jwtErrorToOpenIdError(err) - } - - return jt, nil -} - -func (tv *idTokenValidator) renewAndGetSigningKey(jt *jwt.Token) (interface{}, error) { - // Issuer is already validated when 'getSigningKey was called. 
- iss := jt.Claims.(jwt.MapClaims)[issuerClaimName].(string) - - err := tv.keyGetter.flushCachedSigningKeys(iss) - - if err != nil { - return nil, err - } - - headerVal, ok := jt.Header[keyIDJwtHeaderName] - - if !ok { - return tv.keyGetter.getSigningKey(iss, "") - } - - switch headerVal.(type) { - case string: - return tv.keyGetter.getSigningKey(iss, headerVal.(string)) - default: - return tv.keyGetter.getSigningKey(iss, "") - } - -} - -func (tv *idTokenValidator) getSigningKey(jt *jwt.Token) (interface{}, error) { - provs, err := tv.provGetter() - if err != nil { - return nil, err - } - - if err := providers(provs).validate(); err != nil { - return nil, err - } - - p, err := validateIssuer(jt, provs) - if err != nil { - return nil, err - } - - _, err = validateAudiences(jt, p) - if err != nil { - return nil, err - } - _, err = validateSubject(jt) - if err != nil { - return nil, err - } - - var kid string = "" - - if jt.Header[keyIDJwtHeaderName] != nil { - kid = jt.Header[keyIDJwtHeaderName].(string) - } - - return tv.keyGetter.getSigningKey(p.Issuer, kid) -} - -func validateIssuer(jt *jwt.Token, ps []Provider) (*Provider, error) { - issuerClaim := getIssuer(jt) - var ti string - - if iss, ok := issuerClaim.(string); ok { - ti = iss - } else { - return nil, &ValidationError{Code: ValidationErrorInvalidIssuerType, Message: fmt.Sprintf("Invalid Issuer type: %T", issuerClaim), HTTPStatus: http.StatusUnauthorized} - } - - if ti == "" { - return nil, &ValidationError{Code: ValidationErrorInvalidIssuer, Message: "The token 'iss' claim was not found or was empty.", HTTPStatus: http.StatusUnauthorized} - } - - // Workaround for tokens issued by google - gi := ti - if gi == "accounts.google.com" { - gi = "https://" + gi - } - - for _, p := range ps { - if ti == p.Issuer || gi == p.Issuer { - return &p, nil - } - } - - return nil, &ValidationError{Code: ValidationErrorIssuerNotFound, Message: fmt.Sprintf("No provider was registered with issuer: %v", ti), HTTPStatus: http.StatusUnauthorized} -} - -func validateSubject(jt *jwt.Token) (string, error) { - subjectClaim := getSubject(jt) - - var ts string - if sub, ok := subjectClaim.(string); ok { - ts = sub - } else { - return ts, &ValidationError{Code: ValidationErrorInvalidSubjectType, Message: fmt.Sprintf("Invalid subject type: %T", subjectClaim), HTTPStatus: http.StatusUnauthorized} - } - - if ts == "" { - return ts, &ValidationError{Code: ValidationErrorInvalidSubject, Message: "The token 'sub' claim was not found or was empty.", HTTPStatus: http.StatusUnauthorized} - } - - return ts, nil -} - -func validateAudiences(jt *jwt.Token, p *Provider) (string, error) { - audiencesClaim, err := getAudiences(jt) - - if err != nil { - return "", err - } - - for _, aud := range p.ClientIDs { - for _, audienceClaim := range audiencesClaim { - ta, ok := audienceClaim.(string) - if !ok { - fmt.Printf("aud type %T \n", audienceClaim) - return "", &ValidationError{Code: ValidationErrorInvalidAudienceType, Message: fmt.Sprintf("Invalid Audiences type: %T", audiencesClaim), HTTPStatus: http.StatusUnauthorized} - } - - if ta == "" { - return "", &ValidationError{Code: ValidationErrorInvalidAudience, Message: "The token 'aud' claim was not found or was empty.", HTTPStatus: http.StatusUnauthorized} - } - - if ta == aud { - return ta, nil - } - } - } - - return "", &ValidationError{Code: ValidationErrorAudienceNotFound, Message: fmt.Sprintf("The provider %v does not have a client id matching any of the token audiences %+v", p.Issuer, audiencesClaim), HTTPStatus: 
http.StatusUnauthorized} -} - -func getAudiences(t *jwt.Token) ([]interface{}, error) { - audiencesClaim := t.Claims.(jwt.MapClaims)[audiencesClaimName] - if aud, ok := audiencesClaim.(string); ok { - return []interface{}{aud}, nil - } else if _, ok := audiencesClaim.([]interface{}); ok { - return audiencesClaim.([]interface{}), nil - } - - return nil, &ValidationError{Code: ValidationErrorInvalidAudienceType, Message: fmt.Sprintf("Invalid Audiences type: %T", audiencesClaim), HTTPStatus: http.StatusUnauthorized} - -} - -func getIssuer(t *jwt.Token) interface{} { - return t.Claims.(jwt.MapClaims)[issuerClaimName] -} - -func getSubject(t *jwt.Token) interface{} { - return t.Claims.(jwt.MapClaims)[subjectClaimName] -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/jwksprovider.go b/vendor/github.com/TykTechnologies/openid2go/openid/jwksprovider.go deleted file mode 100644 index 3042e102dd0..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/jwksprovider.go +++ /dev/null @@ -1,39 +0,0 @@ -package openid - -import ( - "fmt" - "net/http" - - "github.com/square/go-jose" -) - -type jwksGetter interface { - getJwkSet(string) (jose.JSONWebKeySet, error) -} - -type httpJwksProvider struct { - getJwks httpGetFunc - decodeJwks decodeResponseFunc -} - -func newHTTPJwksProvider(gf httpGetFunc, df decodeResponseFunc) *httpJwksProvider { - return &httpJwksProvider{gf, df} -} - -func (httpProv *httpJwksProvider) getJwkSet(url string) (jose.JSONWebKeySet, error) { - - var jwks jose.JSONWebKeySet - resp, err := httpProv.getJwks(url) - - if err != nil { - return jwks, &ValidationError{Code: ValidationErrorGetJwksFailure, Message: fmt.Sprintf("Failure while contacting the jwk endpoint %v: %v", url, err), Err: err, HTTPStatus: http.StatusUnauthorized} - } - - defer resp.Body.Close() - - if err := httpProv.decodeJwks(resp.Body, &jwks); err != nil { - return jwks, &ValidationError{Code: ValidationErrorDecodeJwksFailure, Message: fmt.Sprintf("Failure while decoding the jwk retrieved from the endpoint %v: %v", url, err), Err: err, HTTPStatus: http.StatusUnauthorized} - } - - return jwks, nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/middleware.go b/vendor/github.com/TykTechnologies/openid2go/openid/middleware.go deleted file mode 100644 index e57adce2dd5..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/middleware.go +++ /dev/null @@ -1,177 +0,0 @@ -package openid - -import ( - "net/http" - - "github.com/dgrijalva/jwt-go" -) - -// The Configuration contains the entities needed to perform ID token validation. -// This type should be instantiated at the application startup time. -type Configuration struct { - tokenValidator JWTTokenValidator - IDTokenGetter GetIDTokenFunc - errorHandler ErrorHandlerFunc -} - -type option func(*Configuration) error - -// The NewConfiguration creates a new instance of Configuration and returns a pointer to it. -// This function receives a collection of the function type option. Each of those functions are -// responsible for setting some part of the returned *Configuration. If any if the option functions -// returns an error then NewConfiguration will return a nil configuration and that error. 
-func NewConfiguration(options ...option) (*Configuration, error) { - m := new(Configuration) - cp := newHTTPConfigurationProvider(http.Get, jsonDecodeResponse) - jp := newHTTPJwksProvider(http.Get, jsonDecodeResponse) - ksp := newSigningKeySetProvider(cp, jp, pemEncodePublicKey) - kp := newSigningKeyProvider(ksp) - m.tokenValidator = newIDTokenValidator(nil, jwt.Parse, kp) - - for _, option := range options { - err := option(m) - - if err != nil { - return nil, err - } - } - - return m, nil -} - -// The ProvidersGetter option registers the function responsible for returning the -// providers containing the valid issuer and client IDs used to validate the ID Token. -func ProvidersGetter(pg GetProvidersFunc) func(*Configuration) error { - return func(c *Configuration) error { - c.tokenValidator.(*idTokenValidator).provGetter = pg - return nil - } -} - -func TokenValidator(tv JWTTokenValidator) func(*Configuration) error { - return func(c *Configuration) error { - c.tokenValidator = tv - return nil - } -} - -// The ErrorHandler option registers the function responsible for handling -// the errors returned during token validation. When this option is not used then the -// middleware will use the default internal implementation validationErrorToHTTPStatus. -func ErrorHandler(eh ErrorHandlerFunc) func(*Configuration) error { - return func(c *Configuration) error { - c.errorHandler = eh - return nil - } -} - -// The Authenticate middleware performs the validation of the OIDC ID Token. -// If an error happens, i.e.: expired token, the next handler may or may not executed depending on the -// provided ErrorHandlerFunc option. The default behavior, determined by validationErrorToHTTPStatus, -// stops the execution and returns Unauthorized. -// If the validation is successful then the next handler(h) will be executed. -func Authenticate(conf *Configuration, h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if _, halt := authenticate(conf, w, r); !halt { - h.ServeHTTP(w, r) - } - }) -} - -// The AuthenticateUser middleware performs the validation of the OIDC ID Token and -// forwards the authenticated user's information to the next handler in the pipeline. -// If an error happens, i.e.: expired token, the next handler may or may not executed depending on the -// provided ErrorHandlerFunc option. The default behavior, determined by validationErrorToHTTPStatus, -// stops the execution and returns Unauthorized. -// If the validation is successful then the next handler(h) will be executed and will -// receive the authenticated user information. 
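For orientation, a minimal sketch of how a consumer wires this middleware up through the functional options documented above. NewProvider, NewConfiguration, ProvidersGetter and Authenticate are the exported entry points defined in this package; the issuer URL, client ID and port are placeholders.

```go
package main

import (
	"log"
	"net/http"

	"github.com/TykTechnologies/openid2go/openid"
)

func main() {
	// GetProvidersFunc: returns the OPs (issuer + client IDs) this service accepts.
	getProviders := func() ([]openid.Provider, error) {
		p, err := openid.NewProvider("https://op.example.com", []string{"example-client-id"})
		if err != nil {
			return nil, err
		}
		return []openid.Provider{p}, nil
	}

	conf, err := openid.NewConfiguration(openid.ProvidersGetter(getProviders))
	if err != nil {
		log.Fatal(err)
	}

	protected := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ID token accepted\n"))
	})

	// Authenticate validates the Bearer ID token before the wrapped handler runs;
	// on failure the default error handler writes the validation error's HTTP status and halts.
	http.Handle("/protected", openid.Authenticate(conf, protected))
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```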
-func AuthenticateUser(conf *Configuration, h UserHandler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if u, halt := authenticateUser(conf, w, r); !halt { - h.ServeHTTPWithUser(u, w, r) - } - }) -} - -// Exported authenticate so we don't need to use the middleware -func AuthenticateOIDWithUser(c *Configuration, rw http.ResponseWriter, req *http.Request) (*User, *jwt.Token, bool) { - return authenticateUserWithToken(c, rw, req) -} - -func authenticate(c *Configuration, rw http.ResponseWriter, req *http.Request) (t *jwt.Token, halt bool) { - var tg GetIDTokenFunc - if c.IDTokenGetter == nil { - tg = getIDTokenAuthorizationHeader - } else { - tg = c.IDTokenGetter - } - - var eh ErrorHandlerFunc - if c.errorHandler == nil { - eh = validationErrorToHTTPStatus - } else { - eh = c.errorHandler - } - - ts, err := tg(req) - - if err != nil { - return nil, eh(err, rw, req) - } - - vt, err := c.tokenValidator.Validate(ts) - - if err != nil { - return nil, eh(err, rw, req) - } - - return vt, false -} - -func authenticateUser(c *Configuration, rw http.ResponseWriter, req *http.Request) (u *User, halt bool) { - var vt *jwt.Token - - var eh ErrorHandlerFunc - if c.errorHandler == nil { - eh = validationErrorToHTTPStatus - } else { - eh = c.errorHandler - } - - if t, h := authenticate(c, rw, req); h { - return nil, h - } else { - vt = t - } - - u, err := newUser(vt) - - if err != nil { - return nil, eh(err, rw, req) - } - - return u, false -} - -func authenticateUserWithToken(c *Configuration, rw http.ResponseWriter, req *http.Request) (u *User, vt *jwt.Token, halt bool) { - var eh ErrorHandlerFunc - if c.errorHandler == nil { - eh = validationErrorToHTTPStatus - } else { - eh = c.errorHandler - } - - if t, h := authenticate(c, rw, req); h { - return nil, nil, h - } else { - vt = t - } - - u, err := newUser(vt) - - if err != nil { - return nil, nil, eh(err, rw, req) - } - - return u, vt, false -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/provider.go b/vendor/github.com/TykTechnologies/openid2go/openid/provider.go deleted file mode 100644 index ed447339bd6..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/provider.go +++ /dev/null @@ -1,79 +0,0 @@ -package openid - -// Provider represents an OpenId Identity Provider (OP) and contains -// the information needed to perform validation of ID Token. -// See OpenId terminology http://openid.net/specs/openid-connect-core-1_0.html#Terminology. -// -// The Issuer uniquely identifies an OP. This field will be used -// to validate the 'iss' claim present in the ID Token. -// -// The CliendIDs contains the list of client IDs registered with the OP that are meant to be accepted by the service using this package. -// These values are used to validate the 'aud' clain present in the ID Token. -type Provider struct { - Issuer string - ClientIDs []string -} - -// providers represent a collection of OPs. -type providers []Provider - -// NewProvider returns a new instance of a Provider created with the given issuer and clientIDs. -func NewProvider(issuer string, clientIDs []string) (Provider, error) { - p := Provider{issuer, clientIDs} - - if err := p.validate(); err != nil { - return Provider{}, err - } - - return p, nil -} - -// The GetProvidersFunc defines the function type used to retrieve the collection of allowed OP(s) along with the -// respective client IDs registered with those providers that can access the backend service -// using this package. 
-// A function of this type must be provided to NewConfiguration through the option ProvidersGetter. -// The given function will then be invoked for every request intercepted by the Authenticate or AuthenticateUser middleware. -type GetProvidersFunc func() ([]Provider, error) - -func (ps providers) validate() error { - if len(ps) == 0 { - return &SetupError{Code: SetupErrorEmptyProviderCollection, Message: "The collection of providers must contain at least one element."} - } - - for _, p := range ps { - if err := p.validate(); err != nil { - return err - } - } - - return nil -} - -func (p Provider) validate() error { - if err := validateProviderIssuer(p.Issuer); err != nil { - return err - } - - if err := validateProviderClientIDs(p.ClientIDs); err != nil { - return err - } - - return nil -} - -func validateProviderIssuer(iss string) error { - if iss == "" { - return &SetupError{Code: SetupErrorInvalidIssuer, Message: "Empty string issuer not allowed."} - } - - // TODO: Validate that the issuer format complies with openid spec. - return nil -} - -func validateProviderClientIDs(cIDs []string) error { - if len(cIDs) == 0 { - return &SetupError{Code: SetupErrorInvalidClientIDs, Message: "At leat one client id must be provided."} - } - - return nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/readidtoken.go b/vendor/github.com/TykTechnologies/openid2go/openid/readidtoken.go deleted file mode 100644 index e90dc5da789..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/readidtoken.go +++ /dev/null @@ -1,39 +0,0 @@ -package openid - -import ( - "net/http" - "strings" -) - -// GetIdTokenFunc represents the function used to provide the OIDC idToken. -// It uses the provided request(r) to return the id token string(token). -// If the token was not found or had a bad format this function will return an error. -type GetIDTokenFunc func(r *http.Request) (token string, err error) - -// GetIdTokenAuthorizationHeader is the default implementation of the GetIdTokenFunc -// used by this package.I looks for the idToken in the http Authorization header with -// the format 'Bearer TokenString'. If found it will return 'TokenString' if not found -// or the format does not match it will return an error. 
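The 'Bearer TokenString' contract described above is also exposed directly through the exported CheckAndSplitHeader defined just below; a quick illustrative call (the token value is a placeholder):

```go
package main

import (
	"fmt"
	"log"

	"github.com/TykTechnologies/openid2go/openid"
)

func main() {
	// Placeholder token value; only the "Bearer <token>" shape matters here.
	token, err := openid.CheckAndSplitHeader("Bearer eyJhbGciOiJSUzI1NiJ9.e30.sig")
	if err != nil {
		// A missing header, a malformed value, or a non-Bearer scheme all come
		// back as a *ValidationError carrying HTTP 400.
		log.Fatal(err)
	}
	fmt.Println(token) // prints the part after "Bearer "
}
```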
-func getIDTokenAuthorizationHeader(r *http.Request) (t string, err error) { - h := r.Header.Get("Authorization") - - return CheckAndSplitHeader(h) -} - -func CheckAndSplitHeader(h string) (t string, err error) { - if h == "" { - return h, &ValidationError{Code: ValidationErrorAuthorizationHeaderNotFound, Message: "The 'Authorization' header was not found or was empty.", HTTPStatus: http.StatusBadRequest} - } - - p := strings.Split(h, " ") - - if len(p) != 2 { - return h, &ValidationError{Code: ValidationErrorAuthorizationHeaderWrongFormat, Message: "The 'Authorization' header did not have the correct format.", HTTPStatus: http.StatusBadRequest} - } - - if p[0] != "Bearer" { - return h, &ValidationError{Code: ValidationErrorAuthorizationHeaderWrongSchemeName, Message: "The 'Authorization' header scheme name was not 'Bearer'", HTTPStatus: http.StatusBadRequest} - } - - return p[1], nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyencoder.go b/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyencoder.go deleted file mode 100644 index 5b3cfc1e988..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyencoder.go +++ /dev/null @@ -1,23 +0,0 @@ -package openid - -import ( - "crypto/x509" - "encoding/pem" - "fmt" - "net/http" -) - -type pemEncodeFunc func(key interface{}) ([]byte, error) - -func pemEncodePublicKey(key interface{}) ([]byte, error) { - mk, err := x509.MarshalPKIXPublicKey(key) - if err != nil { - return nil, &ValidationError{Code: ValidationErrorMarshallingKey, Message: fmt.Sprint("The jwk key could not be marshalled."), HTTPStatus: http.StatusInternalServerError, Err: err} - } - - ed := pem.EncodeToMemory(&pem.Block{ - Bytes: mk, - }) - - return ed, nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyprovider.go b/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyprovider.go deleted file mode 100644 index b31f60eeb9a..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeyprovider.go +++ /dev/null @@ -1,118 +0,0 @@ -package openid - -import ( - "crypto/x509" - "encoding/pem" - "fmt" - "net/http" - "sync" -) - -var lock = sync.RWMutex{} - -type signingKeyGetter interface { - flushCachedSigningKeys(issuer string) error - getSigningKey(issuer string, kid string) (interface{}, error) -} - -type signingKeyProvider struct { - keySetGetter signingKeySetGetter - jwksMap map[string][]signingKey -} - -func newSigningKeyProvider(kg signingKeySetGetter) *signingKeyProvider { - keyMap := make(map[string][]signingKey) - return &signingKeyProvider{kg, keyMap} -} - -func (s *signingKeyProvider) flushCachedSigningKeys(issuer string) error { - lock.Lock() - defer lock.Unlock() - delete(s.jwksMap, issuer) - return nil -} - -func (s *signingKeyProvider) refreshSigningKeys(issuer string) error { - skeys, err := s.keySetGetter.getSigningKeySet(issuer) - - if err != nil { - return err - } - - lock.Lock() - s.jwksMap[issuer] = skeys - lock.Unlock() - return nil -} - -func parsePublicKey(data []byte) (interface{}, error) { - input := data - block, _ := pem.Decode(data) - if block != nil { - input = block.Bytes - } - var pub interface{} - var err error - pub, err = x509.ParsePKIXPublicKey(input) - if err != nil { - cert, err0 := x509.ParseCertificate(input) - if err0 != nil { - return nil, err0 - } - pub = cert.PublicKey - err = nil - } - return pub, err -} - -func (s *signingKeyProvider) getSigningKey(issuer string, kid string) (interface{}, error) { - lock.RLock() - sk := 
findKey(s.jwksMap, issuer, kid) - lock.RUnlock() - - if sk != nil { - parsed, pErr := parsePublicKey(sk) - if pErr != nil { - return sk, nil - } - return parsed, nil - } - - err := s.refreshSigningKeys(issuer) - - if err != nil { - return nil, err - } - - lock.RLock() - sk = findKey(s.jwksMap, issuer, kid) - lock.RUnlock() - - if sk == nil { - return nil, &ValidationError{Code: ValidationErrorKidNotFound, Message: fmt.Sprintf("The jwk set retrieved for the issuer %v does not contain a key identifier %v.", issuer, kid), HTTPStatus: http.StatusUnauthorized} - } - - parsed, pErr := parsePublicKey(sk) - if pErr != nil { - return sk, nil - } - - return parsed, nil -} - -func findKey(km map[string][]signingKey, issuer string, kid string) []byte { - - if skSet, ok := km[issuer]; ok { - if kid == "" { - return skSet[0].key - } else { - for _, sk := range skSet { - if sk.keyID == kid { - return sk.key - } - } - } - } - - return nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeysetprovider.go b/vendor/github.com/TykTechnologies/openid2go/openid/signingkeysetprovider.go deleted file mode 100644 index 3a40abcde94..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/signingkeysetprovider.go +++ /dev/null @@ -1,56 +0,0 @@ -package openid - -import ( - "fmt" - "net/http" -) - -type signingKeySetGetter interface { - getSigningKeySet(issuer string) ([]signingKey, error) -} - -type signingKeySetProvider struct { - configGetter configurationGetter - jwksGetter jwksGetter - keyEncoder pemEncodeFunc -} - -type signingKey struct { - keyID string - key []byte -} - -func newSigningKeySetProvider(cg configurationGetter, jg jwksGetter, ke pemEncodeFunc) *signingKeySetProvider { - return &signingKeySetProvider{cg, jg, ke} -} - -func (signProv *signingKeySetProvider) getSigningKeySet(iss string) ([]signingKey, error) { - conf, err := signProv.configGetter.getConfiguration(iss) - - if err != nil { - return nil, err - } - - jwks, err := signProv.jwksGetter.getJwkSet(conf.JwksUri) - - if err != nil { - return nil, err - } - - if len(jwks.Keys) == 0 { - return nil, &ValidationError{Code: ValidationErrorEmptyJwk, Message: fmt.Sprintf("The jwk set retrieved for the issuer %v does not contain any key.", iss), HTTPStatus: http.StatusUnauthorized} - } - - sk := make([]signingKey, len(jwks.Keys)) - - for i, k := range jwks.Keys { - ek, err := signProv.keyEncoder(k.Key) - if err != nil { - return nil, err - } - - sk[i] = signingKey{k.KeyID, ek} - } - - return sk, nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/user.go b/vendor/github.com/TykTechnologies/openid2go/openid/user.go deleted file mode 100644 index 20ebd17f932..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/user.go +++ /dev/null @@ -1,45 +0,0 @@ -package openid - -import ( - "net/http" - - "github.com/dgrijalva/jwt-go" -) - -// User represents the authenticated user encapsulating information obtained from the validated ID token. -// -// The Issuer contains the value from the 'iss' claim found in the ID Token. -// -// The ID contains the value of the 'sub' claim found in the ID Token. 
-// -// The Claims contains all the claims present found in the ID Token -type User struct { - Issuer string - ID string - Claims map[string]interface{} -} - -func newUser(t *jwt.Token) (*User, error) { - if t == nil { - return nil, &ValidationError{Code: ValidationErrorIdTokenEmpty, Message: "The token provided to created a user was nil.", HTTPStatus: http.StatusUnauthorized} - } - - iss := getIssuer(t).(string) - - if iss == "" { - return nil, &ValidationError{Code: ValidationErrorInvalidIssuer, Message: "The token provided to created a user did not contain a valid 'iss' claim", HTTPStatus: http.StatusInternalServerError} - } - - sub := getSubject(t).(string) - - if sub == "" { - return nil, &ValidationError{Code: ValidationErrorInvalidSubject, Message: "The token provided to created a user did not contain a valid 'sub' claim.", HTTPStatus: http.StatusInternalServerError} - - } - - u := new(User) - u.Issuer = iss - u.ID = sub - u.Claims = t.Claims.(jwt.MapClaims) - return u, nil -} diff --git a/vendor/github.com/TykTechnologies/openid2go/openid/userhandler.go b/vendor/github.com/TykTechnologies/openid2go/openid/userhandler.go deleted file mode 100644 index 9e45c7129bf..00000000000 --- a/vendor/github.com/TykTechnologies/openid2go/openid/userhandler.go +++ /dev/null @@ -1,22 +0,0 @@ -package openid - -import "net/http" - -// The UserHandler represents a handler to be registered by the middleware AuthenticateUser. -// This handler allows the AuthenticateUser middleware to forward information about the the authenticated user to -// the rest of the application service. -// -// ServeHTTPWithUser is similar to the http.ServeHTTP function. It contains an additional paramater *User, -// which is used by the AuthenticateUser middleware to pass information about the authenticated user. -type UserHandler interface { - ServeHTTPWithUser(*User, http.ResponseWriter, *http.Request) -} - -// The UserHandlerFunc is an adapter to allow the use of functions as UserHandler. -// This is similar to using http.HandlerFunc as http.Handler -type UserHandlerFunc func(*User, http.ResponseWriter, *http.Request) - -// ServeHttpWithUser calls f(u, w, r) -func (f UserHandlerFunc) ServeHTTPWithUser(u *User, w http.ResponseWriter, r *http.Request) { - f(u, w, r) -} diff --git a/vendor/github.com/alecthomas/template/LICENSE b/vendor/github.com/alecthomas/template/LICENSE deleted file mode 100644 index 74487567632..00000000000 --- a/vendor/github.com/alecthomas/template/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/alecthomas/template/README.md b/vendor/github.com/alecthomas/template/README.md deleted file mode 100644 index ef6a8ee303e..00000000000 --- a/vendor/github.com/alecthomas/template/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Go's `text/template` package with newline elision - -This is a fork of Go 1.4's [text/template](http://golang.org/pkg/text/template/) package with one addition: a backslash immediately after a closing delimiter will delete all subsequent newlines until a non-newline. - -eg. - -``` -{{if true}}\ -hello -{{end}}\ -``` - -Will result in: - -``` -hello\n -``` - -Rather than: - -``` -\n -hello\n -\n -``` diff --git a/vendor/github.com/alecthomas/template/doc.go b/vendor/github.com/alecthomas/template/doc.go deleted file mode 100644 index 223c595c25d..00000000000 --- a/vendor/github.com/alecthomas/template/doc.go +++ /dev/null @@ -1,406 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package template implements data-driven templates for generating textual output. - -To generate HTML output, see package html/template, which has the same interface -as this package but automatically secures HTML output against certain attacks. - -Templates are executed by applying them to a data structure. Annotations in the -template refer to elements of the data structure (typically a field of a struct -or a key in a map) to control execution and derive values to be displayed. -Execution of the template walks the structure and sets the cursor, represented -by a period '.' and called "dot", to the value at the current location in the -structure as execution proceeds. - -The input text for a template is UTF-8-encoded text in any format. -"Actions"--data evaluations or control structures--are delimited by -"{{" and "}}"; all text outside actions is copied to the output unchanged. -Actions may not span newlines, although comments can. - -Once parsed, a template may be executed safely in parallel. - -Here is a trivial example that prints "17 items are made of wool". - - type Inventory struct { - Material string - Count uint - } - sweaters := Inventory{"wool", 17} - tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}") - if err != nil { panic(err) } - err = tmpl.Execute(os.Stdout, sweaters) - if err != nil { panic(err) } - -More intricate examples appear below. - -Actions - -Here is the list of actions. "Arguments" and "pipelines" are evaluations of -data, defined in detail below. - -*/ -// {{/* a comment */}} -// A comment; discarded. May contain newlines. -// Comments do not nest and must start and end at the -// delimiters, as shown here. -/* - - {{pipeline}} - The default textual representation of the value of the pipeline - is copied to the output. - - {{if pipeline}} T1 {{end}} - If the value of the pipeline is empty, no output is generated; - otherwise, T1 is executed. 
The empty values are false, 0, any - nil pointer or interface value, and any array, slice, map, or - string of length zero. - Dot is unaffected. - - {{if pipeline}} T1 {{else}} T0 {{end}} - If the value of the pipeline is empty, T0 is executed; - otherwise, T1 is executed. Dot is unaffected. - - {{if pipeline}} T1 {{else if pipeline}} T0 {{end}} - To simplify the appearance of if-else chains, the else action - of an if may include another if directly; the effect is exactly - the same as writing - {{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}} - - {{range pipeline}} T1 {{end}} - The value of the pipeline must be an array, slice, map, or channel. - If the value of the pipeline has length zero, nothing is output; - otherwise, dot is set to the successive elements of the array, - slice, or map and T1 is executed. If the value is a map and the - keys are of basic type with a defined order ("comparable"), the - elements will be visited in sorted key order. - - {{range pipeline}} T1 {{else}} T0 {{end}} - The value of the pipeline must be an array, slice, map, or channel. - If the value of the pipeline has length zero, dot is unaffected and - T0 is executed; otherwise, dot is set to the successive elements - of the array, slice, or map and T1 is executed. - - {{template "name"}} - The template with the specified name is executed with nil data. - - {{template "name" pipeline}} - The template with the specified name is executed with dot set - to the value of the pipeline. - - {{with pipeline}} T1 {{end}} - If the value of the pipeline is empty, no output is generated; - otherwise, dot is set to the value of the pipeline and T1 is - executed. - - {{with pipeline}} T1 {{else}} T0 {{end}} - If the value of the pipeline is empty, dot is unaffected and T0 - is executed; otherwise, dot is set to the value of the pipeline - and T1 is executed. - -Arguments - -An argument is a simple value, denoted by one of the following. - - - A boolean, string, character, integer, floating-point, imaginary - or complex constant in Go syntax. These behave like Go's untyped - constants, although raw strings may not span newlines. - - The keyword nil, representing an untyped Go nil. - - The character '.' (period): - . - The result is the value of dot. - - A variable name, which is a (possibly empty) alphanumeric string - preceded by a dollar sign, such as - $piOver2 - or - $ - The result is the value of the variable. - Variables are described below. - - The name of a field of the data, which must be a struct, preceded - by a period, such as - .Field - The result is the value of the field. Field invocations may be - chained: - .Field1.Field2 - Fields can also be evaluated on variables, including chaining: - $x.Field1.Field2 - - The name of a key of the data, which must be a map, preceded - by a period, such as - .Key - The result is the map element value indexed by the key. - Key invocations may be chained and combined with fields to any - depth: - .Field1.Key1.Field2.Key2 - Although the key must be an alphanumeric identifier, unlike with - field names they do not need to start with an upper case letter. - Keys can also be evaluated on variables, including chaining: - $x.key1.key2 - - The name of a niladic method of the data, preceded by a period, - such as - .Method - The result is the value of invoking the method with dot as the - receiver, dot.Method(). Such a method must have one return value (of - any type) or two return values, the second of which is an error. 
- If it has two and the returned error is non-nil, execution terminates - and an error is returned to the caller as the value of Execute. - Method invocations may be chained and combined with fields and keys - to any depth: - .Field1.Key1.Method1.Field2.Key2.Method2 - Methods can also be evaluated on variables, including chaining: - $x.Method1.Field - - The name of a niladic function, such as - fun - The result is the value of invoking the function, fun(). The return - types and values behave as in methods. Functions and function - names are described below. - - A parenthesized instance of one the above, for grouping. The result - may be accessed by a field or map key invocation. - print (.F1 arg1) (.F2 arg2) - (.StructValuedMethod "arg").Field - -Arguments may evaluate to any type; if they are pointers the implementation -automatically indirects to the base type when required. -If an evaluation yields a function value, such as a function-valued -field of a struct, the function is not invoked automatically, but it -can be used as a truth value for an if action and the like. To invoke -it, use the call function, defined below. - -A pipeline is a possibly chained sequence of "commands". A command is a simple -value (argument) or a function or method call, possibly with multiple arguments: - - Argument - The result is the value of evaluating the argument. - .Method [Argument...] - The method can be alone or the last element of a chain but, - unlike methods in the middle of a chain, it can take arguments. - The result is the value of calling the method with the - arguments: - dot.Method(Argument1, etc.) - functionName [Argument...] - The result is the value of calling the function associated - with the name: - function(Argument1, etc.) - Functions and function names are described below. - -Pipelines - -A pipeline may be "chained" by separating a sequence of commands with pipeline -characters '|'. In a chained pipeline, the result of the each command is -passed as the last argument of the following command. The output of the final -command in the pipeline is the value of the pipeline. - -The output of a command will be either one value or two values, the second of -which has type error. If that second value is present and evaluates to -non-nil, execution terminates and the error is returned to the caller of -Execute. - -Variables - -A pipeline inside an action may initialize a variable to capture the result. -The initialization has syntax - - $variable := pipeline - -where $variable is the name of the variable. An action that declares a -variable produces no output. - -If a "range" action initializes a variable, the variable is set to the -successive elements of the iteration. Also, a "range" may declare two -variables, separated by a comma: - - range $index, $element := pipeline - -in which case $index and $element are set to the successive values of the -array/slice index or map key and element, respectively. Note that if there is -only one variable, it is assigned the element; this is opposite to the -convention in Go range clauses. - -A variable's scope extends to the "end" action of the control structure ("if", -"with", or "range") in which it is declared, or to the end of the template if -there is no such control structure. A template invocation does not inherit -variables from the point of its invocation. - -When execution begins, $ is set to the data argument passed to Execute, that is, -to the starting value of dot. 
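The two-variable range form described above is easiest to see in a runnable snippet. This package is documented as a fork of Go 1.4's text/template, so the sketch below uses the standard-library import; the template text and data are made up for illustration.

```go
package main

import (
	"log"
	"os"
	"text/template" // the removed fork mirrors this standard-library API
)

func main() {
	// $i is bound to the index and $name to the element, as described above.
	const src = "{{range $i, $name := .}}{{$i}}: {{$name}}\n{{end}}"

	t, err := template.New("list").Parse(src)
	if err != nil {
		log.Fatal(err)
	}
	// Prints "0: alpha", "1: beta", "2: gamma" on separate lines.
	if err := t.Execute(os.Stdout, []string{"alpha", "beta", "gamma"}); err != nil {
		log.Fatal(err)
	}
}
```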
- -Examples - -Here are some example one-line templates demonstrating pipelines and variables. -All produce the quoted word "output": - - {{"\"output\""}} - A string constant. - {{`"output"`}} - A raw string constant. - {{printf "%q" "output"}} - A function call. - {{"output" | printf "%q"}} - A function call whose final argument comes from the previous - command. - {{printf "%q" (print "out" "put")}} - A parenthesized argument. - {{"put" | printf "%s%s" "out" | printf "%q"}} - A more elaborate call. - {{"output" | printf "%s" | printf "%q"}} - A longer chain. - {{with "output"}}{{printf "%q" .}}{{end}} - A with action using dot. - {{with $x := "output" | printf "%q"}}{{$x}}{{end}} - A with action that creates and uses a variable. - {{with $x := "output"}}{{printf "%q" $x}}{{end}} - A with action that uses the variable in another action. - {{with $x := "output"}}{{$x | printf "%q"}}{{end}} - The same, but pipelined. - -Functions - -During execution functions are found in two function maps: first in the -template, then in the global function map. By default, no functions are defined -in the template but the Funcs method can be used to add them. - -Predefined global functions are named as follows. - - and - Returns the boolean AND of its arguments by returning the - first empty argument or the last argument, that is, - "and x y" behaves as "if x then y else x". All the - arguments are evaluated. - call - Returns the result of calling the first argument, which - must be a function, with the remaining arguments as parameters. - Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where - Y is a func-valued field, map entry, or the like. - The first argument must be the result of an evaluation - that yields a value of function type (as distinct from - a predefined function such as print). The function must - return either one or two result values, the second of which - is of type error. If the arguments don't match the function - or the returned error value is non-nil, execution stops. - html - Returns the escaped HTML equivalent of the textual - representation of its arguments. - index - Returns the result of indexing its first argument by the - following arguments. Thus "index x 1 2 3" is, in Go syntax, - x[1][2][3]. Each indexed item must be a map, slice, or array. - js - Returns the escaped JavaScript equivalent of the textual - representation of its arguments. - len - Returns the integer length of its argument. - not - Returns the boolean negation of its single argument. - or - Returns the boolean OR of its arguments by returning the - first non-empty argument or the last argument, that is, - "or x y" behaves as "if x then x else y". All the - arguments are evaluated. - print - An alias for fmt.Sprint - printf - An alias for fmt.Sprintf - println - An alias for fmt.Sprintln - urlquery - Returns the escaped value of the textual representation of - its arguments in a form suitable for embedding in a URL query. - -The boolean functions take any zero value to be false and a non-zero -value to be true. 
- -There is also a set of binary comparison operators defined as -functions: - - eq - Returns the boolean truth of arg1 == arg2 - ne - Returns the boolean truth of arg1 != arg2 - lt - Returns the boolean truth of arg1 < arg2 - le - Returns the boolean truth of arg1 <= arg2 - gt - Returns the boolean truth of arg1 > arg2 - ge - Returns the boolean truth of arg1 >= arg2 - -For simpler multi-way equality tests, eq (only) accepts two or more -arguments and compares the second and subsequent to the first, -returning in effect - - arg1==arg2 || arg1==arg3 || arg1==arg4 ... - -(Unlike with || in Go, however, eq is a function call and all the -arguments will be evaluated.) - -The comparison functions work on basic types only (or named basic -types, such as "type Celsius float32"). They implement the Go rules -for comparison of values, except that size and exact type are -ignored, so any integer value, signed or unsigned, may be compared -with any other integer value. (The arithmetic value is compared, -not the bit pattern, so all negative integers are less than all -unsigned integers.) However, as usual, one may not compare an int -with a float32 and so on. - -Associated templates - -Each template is named by a string specified when it is created. Also, each -template is associated with zero or more other templates that it may invoke by -name; such associations are transitive and form a name space of templates. - -A template may use a template invocation to instantiate another associated -template; see the explanation of the "template" action above. The name must be -that of a template associated with the template that contains the invocation. - -Nested template definitions - -When parsing a template, another template may be defined and associated with the -template being parsed. Template definitions must appear at the top level of the -template, much like global variables in a Go program. - -The syntax of such definitions is to surround each template declaration with a -"define" and "end" action. - -The define action names the template being created by providing a string -constant. Here is a simple example: - - `{{define "T1"}}ONE{{end}} - {{define "T2"}}TWO{{end}} - {{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}} - {{template "T3"}}` - -This defines two templates, T1 and T2, and a third T3 that invokes the other two -when it is executed. Finally it invokes T3. If executed this template will -produce the text - - ONE TWO - -By construction, a template may reside in only one association. If it's -necessary to have a template addressable from multiple associations, the -template definition must be parsed multiple times to create distinct *Template -values, or must be copied with the Clone or AddParseTree method. - -Parse may be called multiple times to assemble the various associated templates; -see the ParseFiles and ParseGlob functions and methods for simple ways to parse -related templates stored in files. - -A template may be executed directly or through ExecuteTemplate, which executes -an associated template identified by name. 
To invoke our example above, we -might write, - - err := tmpl.Execute(os.Stdout, "no data needed") - if err != nil { - log.Fatalf("execution failed: %s", err) - } - -or to invoke a particular template explicitly by name, - - err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed") - if err != nil { - log.Fatalf("execution failed: %s", err) - } - -*/ -package template diff --git a/vendor/github.com/alecthomas/template/exec.go b/vendor/github.com/alecthomas/template/exec.go deleted file mode 100644 index c3078e5d0c0..00000000000 --- a/vendor/github.com/alecthomas/template/exec.go +++ /dev/null @@ -1,845 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "fmt" - "io" - "reflect" - "runtime" - "sort" - "strings" - - "github.com/alecthomas/template/parse" -) - -// state represents the state of an execution. It's not part of the -// template so that multiple executions of the same template -// can execute in parallel. -type state struct { - tmpl *Template - wr io.Writer - node parse.Node // current node, for errors - vars []variable // push-down stack of variable values. -} - -// variable holds the dynamic value of a variable such as $, $x etc. -type variable struct { - name string - value reflect.Value -} - -// push pushes a new variable on the stack. -func (s *state) push(name string, value reflect.Value) { - s.vars = append(s.vars, variable{name, value}) -} - -// mark returns the length of the variable stack. -func (s *state) mark() int { - return len(s.vars) -} - -// pop pops the variable stack up to the mark. -func (s *state) pop(mark int) { - s.vars = s.vars[0:mark] -} - -// setVar overwrites the top-nth variable on the stack. Used by range iterations. -func (s *state) setVar(n int, value reflect.Value) { - s.vars[len(s.vars)-n].value = value -} - -// varValue returns the value of the named variable. -func (s *state) varValue(name string) reflect.Value { - for i := s.mark() - 1; i >= 0; i-- { - if s.vars[i].name == name { - return s.vars[i].value - } - } - s.errorf("undefined variable: %s", name) - return zero -} - -var zero reflect.Value - -// at marks the state to be on node n, for error reporting. -func (s *state) at(node parse.Node) { - s.node = node -} - -// doublePercent returns the string with %'s replaced by %%, if necessary, -// so it can be used safely inside a Printf format string. -func doublePercent(str string) string { - if strings.Contains(str, "%") { - str = strings.Replace(str, "%", "%%", -1) - } - return str -} - -// errorf formats the error and terminates processing. -func (s *state) errorf(format string, args ...interface{}) { - name := doublePercent(s.tmpl.Name()) - if s.node == nil { - format = fmt.Sprintf("template: %s: %s", name, format) - } else { - location, context := s.tmpl.ErrorContext(s.node) - format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format) - } - panic(fmt.Errorf(format, args...)) -} - -// errRecover is the handler that turns panics into returns from the top -// level of Parse. -func errRecover(errp *error) { - e := recover() - if e != nil { - switch err := e.(type) { - case runtime.Error: - panic(e) - case error: - *errp = err - default: - panic(e) - } - } -} - -// ExecuteTemplate applies the template associated with t that has the given name -// to the specified data object and writes the output to wr. 
-// If an error occurs executing the template or writing its output, -// execution stops, but partial results may already have been written to -// the output writer. -// A template may be executed safely in parallel. -func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error { - tmpl := t.tmpl[name] - if tmpl == nil { - return fmt.Errorf("template: no template %q associated with template %q", name, t.name) - } - return tmpl.Execute(wr, data) -} - -// Execute applies a parsed template to the specified data object, -// and writes the output to wr. -// If an error occurs executing the template or writing its output, -// execution stops, but partial results may already have been written to -// the output writer. -// A template may be executed safely in parallel. -func (t *Template) Execute(wr io.Writer, data interface{}) (err error) { - defer errRecover(&err) - value := reflect.ValueOf(data) - state := &state{ - tmpl: t, - wr: wr, - vars: []variable{{"$", value}}, - } - t.init() - if t.Tree == nil || t.Root == nil { - var b bytes.Buffer - for name, tmpl := range t.tmpl { - if tmpl.Tree == nil || tmpl.Root == nil { - continue - } - if b.Len() > 0 { - b.WriteString(", ") - } - fmt.Fprintf(&b, "%q", name) - } - var s string - if b.Len() > 0 { - s = "; defined templates are: " + b.String() - } - state.errorf("%q is an incomplete or empty template%s", t.Name(), s) - } - state.walk(value, t.Root) - return -} - -// Walk functions step through the major pieces of the template structure, -// generating output as they go. -func (s *state) walk(dot reflect.Value, node parse.Node) { - s.at(node) - switch node := node.(type) { - case *parse.ActionNode: - // Do not pop variables so they persist until next end. - // Also, if the action declares variables, don't print the result. - val := s.evalPipeline(dot, node.Pipe) - if len(node.Pipe.Decl) == 0 { - s.printValue(node, val) - } - case *parse.IfNode: - s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList) - case *parse.ListNode: - for _, node := range node.Nodes { - s.walk(dot, node) - } - case *parse.RangeNode: - s.walkRange(dot, node) - case *parse.TemplateNode: - s.walkTemplate(dot, node) - case *parse.TextNode: - if _, err := s.wr.Write(node.Text); err != nil { - s.errorf("%s", err) - } - case *parse.WithNode: - s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList) - default: - s.errorf("unknown node: %s", node) - } -} - -// walkIfOrWith walks an 'if' or 'with' node. The two control structures -// are identical in behavior except that 'with' sets dot. -func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) { - defer s.pop(s.mark()) - val := s.evalPipeline(dot, pipe) - truth, ok := isTrue(val) - if !ok { - s.errorf("if/with can't use %v", val) - } - if truth { - if typ == parse.NodeWith { - s.walk(val, list) - } else { - s.walk(dot, list) - } - } else if elseList != nil { - s.walk(dot, elseList) - } -} - -// isTrue reports whether the value is 'true', in the sense of not the zero of its type, -// and whether the value has a meaningful truth value. -func isTrue(val reflect.Value) (truth, ok bool) { - if !val.IsValid() { - // Something like var x interface{}, never set. It's a form of nil. 
- return false, true - } - switch val.Kind() { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - truth = val.Len() > 0 - case reflect.Bool: - truth = val.Bool() - case reflect.Complex64, reflect.Complex128: - truth = val.Complex() != 0 - case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface: - truth = !val.IsNil() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - truth = val.Int() != 0 - case reflect.Float32, reflect.Float64: - truth = val.Float() != 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - truth = val.Uint() != 0 - case reflect.Struct: - truth = true // Struct values are always true. - default: - return - } - return truth, true -} - -func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) { - s.at(r) - defer s.pop(s.mark()) - val, _ := indirect(s.evalPipeline(dot, r.Pipe)) - // mark top of stack before any variables in the body are pushed. - mark := s.mark() - oneIteration := func(index, elem reflect.Value) { - // Set top var (lexically the second if there are two) to the element. - if len(r.Pipe.Decl) > 0 { - s.setVar(1, elem) - } - // Set next var (lexically the first if there are two) to the index. - if len(r.Pipe.Decl) > 1 { - s.setVar(2, index) - } - s.walk(elem, r.List) - s.pop(mark) - } - switch val.Kind() { - case reflect.Array, reflect.Slice: - if val.Len() == 0 { - break - } - for i := 0; i < val.Len(); i++ { - oneIteration(reflect.ValueOf(i), val.Index(i)) - } - return - case reflect.Map: - if val.Len() == 0 { - break - } - for _, key := range sortKeys(val.MapKeys()) { - oneIteration(key, val.MapIndex(key)) - } - return - case reflect.Chan: - if val.IsNil() { - break - } - i := 0 - for ; ; i++ { - elem, ok := val.Recv() - if !ok { - break - } - oneIteration(reflect.ValueOf(i), elem) - } - if i == 0 { - break - } - return - case reflect.Invalid: - break // An invalid value is likely a nil map, etc. and acts like an empty map. - default: - s.errorf("range can't iterate over %v", val) - } - if r.ElseList != nil { - s.walk(dot, r.ElseList) - } -} - -func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) { - s.at(t) - tmpl := s.tmpl.tmpl[t.Name] - if tmpl == nil { - s.errorf("template %q not defined", t.Name) - } - // Variables declared by the pipeline persist. - dot = s.evalPipeline(dot, t.Pipe) - newState := *s - newState.tmpl = tmpl - // No dynamic scoping: template invocations inherit no variables. - newState.vars = []variable{{"$", dot}} - newState.walk(dot, tmpl.Root) -} - -// Eval functions evaluate pipelines, commands, and their elements and extract -// values from the data structure by examining fields, calling methods, and so on. -// The printing of those values happens only through walk functions. - -// evalPipeline returns the value acquired by evaluating a pipeline. If the -// pipeline has a variable declaration, the variable will be pushed on the -// stack. Callers should therefore pop the stack after they are finished -// executing commands depending on the pipeline value. -func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) { - if pipe == nil { - return - } - s.at(pipe) - for _, cmd := range pipe.Cmds { - value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg. - // If the object has type interface{}, dig down one level to the thing inside. 
- if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 { - value = reflect.ValueOf(value.Interface()) // lovely! - } - } - for _, variable := range pipe.Decl { - s.push(variable.Ident[0], value) - } - return value -} - -func (s *state) notAFunction(args []parse.Node, final reflect.Value) { - if len(args) > 1 || final.IsValid() { - s.errorf("can't give argument to non-function %s", args[0]) - } -} - -func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value { - firstWord := cmd.Args[0] - switch n := firstWord.(type) { - case *parse.FieldNode: - return s.evalFieldNode(dot, n, cmd.Args, final) - case *parse.ChainNode: - return s.evalChainNode(dot, n, cmd.Args, final) - case *parse.IdentifierNode: - // Must be a function. - return s.evalFunction(dot, n, cmd, cmd.Args, final) - case *parse.PipeNode: - // Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored. - return s.evalPipeline(dot, n) - case *parse.VariableNode: - return s.evalVariableNode(dot, n, cmd.Args, final) - } - s.at(firstWord) - s.notAFunction(cmd.Args, final) - switch word := firstWord.(type) { - case *parse.BoolNode: - return reflect.ValueOf(word.True) - case *parse.DotNode: - return dot - case *parse.NilNode: - s.errorf("nil is not a command") - case *parse.NumberNode: - return s.idealConstant(word) - case *parse.StringNode: - return reflect.ValueOf(word.Text) - } - s.errorf("can't evaluate command %q", firstWord) - panic("not reached") -} - -// idealConstant is called to return the value of a number in a context where -// we don't know the type. In that case, the syntax of the number tells us -// its type, and we use Go rules to resolve. Note there is no such thing as -// a uint ideal constant in this situation - the value must be of int type. -func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value { - // These are ideal constants but we don't know the type - // and we have no context. (If it was a method argument, - // we'd know what we need.) The syntax guides us to some extent. - s.at(constant) - switch { - case constant.IsComplex: - return reflect.ValueOf(constant.Complex128) // incontrovertible. - case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0: - return reflect.ValueOf(constant.Float64) - case constant.IsInt: - n := int(constant.Int64) - if int64(n) != constant.Int64 { - s.errorf("%s overflows int", constant.Text) - } - return reflect.ValueOf(n) - case constant.IsUint: - s.errorf("%s overflows int", constant.Text) - } - return zero -} - -func isHexConstant(s string) bool { - return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X') -} - -func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value { - s.at(field) - return s.evalFieldChain(dot, dot, field, field.Ident, args, final) -} - -func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value { - s.at(chain) - // (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields. 
- pipe := s.evalArg(dot, nil, chain.Node) - if len(chain.Field) == 0 { - s.errorf("internal error: no fields in evalChainNode") - } - return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final) -} - -func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value { - // $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields. - s.at(variable) - value := s.varValue(variable.Ident[0]) - if len(variable.Ident) == 1 { - s.notAFunction(args, final) - return value - } - return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final) -} - -// evalFieldChain evaluates .X.Y.Z possibly followed by arguments. -// dot is the environment in which to evaluate arguments, while -// receiver is the value being walked along the chain. -func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value { - n := len(ident) - for i := 0; i < n-1; i++ { - receiver = s.evalField(dot, ident[i], node, nil, zero, receiver) - } - // Now if it's a method, it gets the arguments. - return s.evalField(dot, ident[n-1], node, args, final, receiver) -} - -func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value { - s.at(node) - name := node.Ident - function, ok := findFunction(name, s.tmpl) - if !ok { - s.errorf("%q is not a defined function", name) - } - return s.evalCall(dot, function, cmd, name, args, final) -} - -// evalField evaluates an expression like (.Field) or (.Field arg1 arg2). -// The 'final' argument represents the return value from the preceding -// value of the pipeline, if any. -func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value { - if !receiver.IsValid() { - return zero - } - typ := receiver.Type() - receiver, _ = indirect(receiver) - // Unless it's an interface, need to get to a value of type *T to guarantee - // we see all methods of T and *T. - ptr := receiver - if ptr.Kind() != reflect.Interface && ptr.CanAddr() { - ptr = ptr.Addr() - } - if method := ptr.MethodByName(fieldName); method.IsValid() { - return s.evalCall(dot, method, node, fieldName, args, final) - } - hasArgs := len(args) > 1 || final.IsValid() - // It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil. - receiver, isNil := indirect(receiver) - if isNil { - s.errorf("nil pointer evaluating %s.%s", typ, fieldName) - } - switch receiver.Kind() { - case reflect.Struct: - tField, ok := receiver.Type().FieldByName(fieldName) - if ok { - field := receiver.FieldByIndex(tField.Index) - if tField.PkgPath != "" { // field is unexported - s.errorf("%s is an unexported field of struct type %s", fieldName, typ) - } - // If it's a function, we must call it. - if hasArgs { - s.errorf("%s has arguments but cannot be invoked as function", fieldName) - } - return field - } - s.errorf("%s is not a field of struct type %s", fieldName, typ) - case reflect.Map: - // If it's a map, attempt to use the field name as a key. 
- nameVal := reflect.ValueOf(fieldName) - if nameVal.Type().AssignableTo(receiver.Type().Key()) { - if hasArgs { - s.errorf("%s is not a method but has arguments", fieldName) - } - return receiver.MapIndex(nameVal) - } - } - s.errorf("can't evaluate field %s in type %s", fieldName, typ) - panic("not reached") -} - -var ( - errorType = reflect.TypeOf((*error)(nil)).Elem() - fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() -) - -// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so -// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0] -// as the function itself. -func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value { - if args != nil { - args = args[1:] // Zeroth arg is function name/node; not passed to function. - } - typ := fun.Type() - numIn := len(args) - if final.IsValid() { - numIn++ - } - numFixed := len(args) - if typ.IsVariadic() { - numFixed = typ.NumIn() - 1 // last arg is the variadic one. - if numIn < numFixed { - s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args)) - } - } else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() { - s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args)) - } - if !goodFunc(typ) { - // TODO: This could still be a confusing error; maybe goodFunc should provide info. - s.errorf("can't call method/function %q with %d results", name, typ.NumOut()) - } - // Build the arg list. - argv := make([]reflect.Value, numIn) - // Args must be evaluated. Fixed args first. - i := 0 - for ; i < numFixed && i < len(args); i++ { - argv[i] = s.evalArg(dot, typ.In(i), args[i]) - } - // Now the ... args. - if typ.IsVariadic() { - argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice. - for ; i < len(args); i++ { - argv[i] = s.evalArg(dot, argType, args[i]) - } - } - // Add final value if necessary. - if final.IsValid() { - t := typ.In(typ.NumIn() - 1) - if typ.IsVariadic() { - t = t.Elem() - } - argv[i] = s.validateType(final, t) - } - result := fun.Call(argv) - // If we have an error that is not nil, stop execution and return that error to the caller. - if len(result) == 2 && !result[1].IsNil() { - s.at(node) - s.errorf("error calling %s: %s", name, result[1].Interface().(error)) - } - return result[0] -} - -// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero. -func canBeNil(typ reflect.Type) bool { - switch typ.Kind() { - case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return true - } - return false -} - -// validateType guarantees that the value is valid and assignable to the type. -func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value { - if !value.IsValid() { - if typ == nil || canBeNil(typ) { - // An untyped nil interface{}. Accept as a proper nil value. - return reflect.Zero(typ) - } - s.errorf("invalid value; expected %s", typ) - } - if typ != nil && !value.Type().AssignableTo(typ) { - if value.Kind() == reflect.Interface && !value.IsNil() { - value = value.Elem() - if value.Type().AssignableTo(typ) { - return value - } - // fallthrough - } - // Does one dereference or indirection work? We could do more, as we - // do with method receivers, but that gets messy and method receivers - // are much more constrained, so it makes more sense there than here. 
- // Besides, one is almost always all you need. - switch { - case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ): - value = value.Elem() - if !value.IsValid() { - s.errorf("dereference of nil pointer of type %s", typ) - } - case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr(): - value = value.Addr() - default: - s.errorf("wrong type for value; expected %s; got %s", typ, value.Type()) - } - } - return value -} - -func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - switch arg := n.(type) { - case *parse.DotNode: - return s.validateType(dot, typ) - case *parse.NilNode: - if canBeNil(typ) { - return reflect.Zero(typ) - } - s.errorf("cannot assign nil to %s", typ) - case *parse.FieldNode: - return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ) - case *parse.VariableNode: - return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ) - case *parse.PipeNode: - return s.validateType(s.evalPipeline(dot, arg), typ) - case *parse.IdentifierNode: - return s.evalFunction(dot, arg, arg, nil, zero) - case *parse.ChainNode: - return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ) - } - switch typ.Kind() { - case reflect.Bool: - return s.evalBool(typ, n) - case reflect.Complex64, reflect.Complex128: - return s.evalComplex(typ, n) - case reflect.Float32, reflect.Float64: - return s.evalFloat(typ, n) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return s.evalInteger(typ, n) - case reflect.Interface: - if typ.NumMethod() == 0 { - return s.evalEmptyInterface(dot, n) - } - case reflect.String: - return s.evalString(typ, n) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return s.evalUnsignedInteger(typ, n) - } - s.errorf("can't handle %s for arg of type %s", n, typ) - panic("not reached") -} - -func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.BoolNode); ok { - value := reflect.New(typ).Elem() - value.SetBool(n.True) - return value - } - s.errorf("expected bool; found %s", n) - panic("not reached") -} - -func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.StringNode); ok { - value := reflect.New(typ).Elem() - value.SetString(n.Text) - return value - } - s.errorf("expected string; found %s", n) - panic("not reached") -} - -func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsInt { - value := reflect.New(typ).Elem() - value.SetInt(n.Int64) - return value - } - s.errorf("expected integer; found %s", n) - panic("not reached") -} - -func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsUint { - value := reflect.New(typ).Elem() - value.SetUint(n.Uint64) - return value - } - s.errorf("expected unsigned integer; found %s", n) - panic("not reached") -} - -func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsFloat { - value := reflect.New(typ).Elem() - value.SetFloat(n.Float64) - return value - } - s.errorf("expected float; found %s", n) - panic("not reached") -} - -func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value { - if n, ok := n.(*parse.NumberNode); ok && n.IsComplex { - value := reflect.New(typ).Elem() - 
value.SetComplex(n.Complex128) - return value - } - s.errorf("expected complex; found %s", n) - panic("not reached") -} - -func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value { - s.at(n) - switch n := n.(type) { - case *parse.BoolNode: - return reflect.ValueOf(n.True) - case *parse.DotNode: - return dot - case *parse.FieldNode: - return s.evalFieldNode(dot, n, nil, zero) - case *parse.IdentifierNode: - return s.evalFunction(dot, n, n, nil, zero) - case *parse.NilNode: - // NilNode is handled in evalArg, the only place that calls here. - s.errorf("evalEmptyInterface: nil (can't happen)") - case *parse.NumberNode: - return s.idealConstant(n) - case *parse.StringNode: - return reflect.ValueOf(n.Text) - case *parse.VariableNode: - return s.evalVariableNode(dot, n, nil, zero) - case *parse.PipeNode: - return s.evalPipeline(dot, n) - } - s.errorf("can't handle assignment of %s to empty interface argument", n) - panic("not reached") -} - -// indirect returns the item at the end of indirection, and a bool to indicate if it's nil. -// We indirect through pointers and empty interfaces (only) because -// non-empty interfaces have methods we might need. -func indirect(v reflect.Value) (rv reflect.Value, isNil bool) { - for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() { - if v.IsNil() { - return v, true - } - if v.Kind() == reflect.Interface && v.NumMethod() > 0 { - break - } - } - return v, false -} - -// printValue writes the textual representation of the value to the output of -// the template. -func (s *state) printValue(n parse.Node, v reflect.Value) { - s.at(n) - iface, ok := printableValue(v) - if !ok { - s.errorf("can't print %s of type %s", n, v.Type()) - } - fmt.Fprint(s.wr, iface) -} - -// printableValue returns the, possibly indirected, interface value inside v that -// is best for a call to formatted printer. -func printableValue(v reflect.Value) (interface{}, bool) { - if v.Kind() == reflect.Ptr { - v, _ = indirect(v) // fmt.Fprint handles nil. - } - if !v.IsValid() { - return "", true - } - - if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) { - if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) { - v = v.Addr() - } else { - switch v.Kind() { - case reflect.Chan, reflect.Func: - return nil, false - } - } - } - return v.Interface(), true -} - -// Types to help sort the keys in a map for reproducible output. - -type rvs []reflect.Value - -func (x rvs) Len() int { return len(x) } -func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -type rvInts struct{ rvs } - -func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() } - -type rvUints struct{ rvs } - -func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() } - -type rvFloats struct{ rvs } - -func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() } - -type rvStrings struct{ rvs } - -func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() } - -// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys. 
-func sortKeys(v []reflect.Value) []reflect.Value { - if len(v) <= 1 { - return v - } - switch v[0].Kind() { - case reflect.Float32, reflect.Float64: - sort.Sort(rvFloats{v}) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - sort.Sort(rvInts{v}) - case reflect.String: - sort.Sort(rvStrings{v}) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - sort.Sort(rvUints{v}) - } - return v -} diff --git a/vendor/github.com/alecthomas/template/funcs.go b/vendor/github.com/alecthomas/template/funcs.go deleted file mode 100644 index 39ee5ed68fb..00000000000 --- a/vendor/github.com/alecthomas/template/funcs.go +++ /dev/null @@ -1,598 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "errors" - "fmt" - "io" - "net/url" - "reflect" - "strings" - "unicode" - "unicode/utf8" -) - -// FuncMap is the type of the map defining the mapping from names to functions. -// Each function must have either a single return value, or two return values of -// which the second has type error. In that case, if the second (error) -// return value evaluates to non-nil during execution, execution terminates and -// Execute returns that error. -type FuncMap map[string]interface{} - -var builtins = FuncMap{ - "and": and, - "call": call, - "html": HTMLEscaper, - "index": index, - "js": JSEscaper, - "len": length, - "not": not, - "or": or, - "print": fmt.Sprint, - "printf": fmt.Sprintf, - "println": fmt.Sprintln, - "urlquery": URLQueryEscaper, - - // Comparisons - "eq": eq, // == - "ge": ge, // >= - "gt": gt, // > - "le": le, // <= - "lt": lt, // < - "ne": ne, // != -} - -var builtinFuncs = createValueFuncs(builtins) - -// createValueFuncs turns a FuncMap into a map[string]reflect.Value -func createValueFuncs(funcMap FuncMap) map[string]reflect.Value { - m := make(map[string]reflect.Value) - addValueFuncs(m, funcMap) - return m -} - -// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values. -func addValueFuncs(out map[string]reflect.Value, in FuncMap) { - for name, fn := range in { - v := reflect.ValueOf(fn) - if v.Kind() != reflect.Func { - panic("value for " + name + " not a function") - } - if !goodFunc(v.Type()) { - panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut())) - } - out[name] = v - } -} - -// addFuncs adds to values the functions in funcs. It does no checking of the input - -// call addValueFuncs first. -func addFuncs(out, in FuncMap) { - for name, fn := range in { - out[name] = fn - } -} - -// goodFunc checks that the function or method has the right result signature. -func goodFunc(typ reflect.Type) bool { - // We allow functions with 1 result or 2 results where the second is an error. - switch { - case typ.NumOut() == 1: - return true - case typ.NumOut() == 2 && typ.Out(1) == errorType: - return true - } - return false -} - -// findFunction looks for a function in the template, and global map. -func findFunction(name string, tmpl *Template) (reflect.Value, bool) { - if tmpl != nil && tmpl.common != nil { - if fn := tmpl.execFuncs[name]; fn.IsValid() { - return fn, true - } - } - if fn := builtinFuncs[name]; fn.IsValid() { - return fn, true - } - return reflect.Value{}, false -} - -// Indexing. - -// index returns the result of indexing its first argument by the following -// arguments. 
Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each -// indexed item must be a map, slice, or array. -func index(item interface{}, indices ...interface{}) (interface{}, error) { - v := reflect.ValueOf(item) - for _, i := range indices { - index := reflect.ValueOf(i) - var isNil bool - if v, isNil = indirect(v); isNil { - return nil, fmt.Errorf("index of nil pointer") - } - switch v.Kind() { - case reflect.Array, reflect.Slice, reflect.String: - var x int64 - switch index.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - x = index.Int() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - x = int64(index.Uint()) - default: - return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type()) - } - if x < 0 || x >= int64(v.Len()) { - return nil, fmt.Errorf("index out of range: %d", x) - } - v = v.Index(int(x)) - case reflect.Map: - if !index.IsValid() { - index = reflect.Zero(v.Type().Key()) - } - if !index.Type().AssignableTo(v.Type().Key()) { - return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type()) - } - if x := v.MapIndex(index); x.IsValid() { - v = x - } else { - v = reflect.Zero(v.Type().Elem()) - } - default: - return nil, fmt.Errorf("can't index item of type %s", v.Type()) - } - } - return v.Interface(), nil -} - -// Length - -// length returns the length of the item, with an error if it has no defined length. -func length(item interface{}) (int, error) { - v, isNil := indirect(reflect.ValueOf(item)) - if isNil { - return 0, fmt.Errorf("len of nil pointer") - } - switch v.Kind() { - case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String: - return v.Len(), nil - } - return 0, fmt.Errorf("len of type %s", v.Type()) -} - -// Function invocation - -// call returns the result of evaluating the first argument as a function. -// The function must return 1 result, or 2 results, the second of which is an error. -func call(fn interface{}, args ...interface{}) (interface{}, error) { - v := reflect.ValueOf(fn) - typ := v.Type() - if typ.Kind() != reflect.Func { - return nil, fmt.Errorf("non-function of type %s", typ) - } - if !goodFunc(typ) { - return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut()) - } - numIn := typ.NumIn() - var dddType reflect.Type - if typ.IsVariadic() { - if len(args) < numIn-1 { - return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1) - } - dddType = typ.In(numIn - 1).Elem() - } else { - if len(args) != numIn { - return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn) - } - } - argv := make([]reflect.Value, len(args)) - for i, arg := range args { - value := reflect.ValueOf(arg) - // Compute the expected type. Clumsy because of variadics. - var argType reflect.Type - if !typ.IsVariadic() || i < numIn-1 { - argType = typ.In(i) - } else { - argType = dddType - } - if !value.IsValid() && canBeNil(argType) { - value = reflect.Zero(argType) - } - if !value.Type().AssignableTo(argType) { - return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType) - } - argv[i] = value - } - result := v.Call(argv) - if len(result) == 2 && !result[1].IsNil() { - return result[0].Interface(), result[1].Interface().(error) - } - return result[0].Interface(), nil -} - -// Boolean logic. 
- -func truth(a interface{}) bool { - t, _ := isTrue(reflect.ValueOf(a)) - return t -} - -// and computes the Boolean AND of its arguments, returning -// the first false argument it encounters, or the last argument. -func and(arg0 interface{}, args ...interface{}) interface{} { - if !truth(arg0) { - return arg0 - } - for i := range args { - arg0 = args[i] - if !truth(arg0) { - break - } - } - return arg0 -} - -// or computes the Boolean OR of its arguments, returning -// the first true argument it encounters, or the last argument. -func or(arg0 interface{}, args ...interface{}) interface{} { - if truth(arg0) { - return arg0 - } - for i := range args { - arg0 = args[i] - if truth(arg0) { - break - } - } - return arg0 -} - -// not returns the Boolean negation of its argument. -func not(arg interface{}) (truth bool) { - truth, _ = isTrue(reflect.ValueOf(arg)) - return !truth -} - -// Comparison. - -// TODO: Perhaps allow comparison between signed and unsigned integers. - -var ( - errBadComparisonType = errors.New("invalid type for comparison") - errBadComparison = errors.New("incompatible types for comparison") - errNoComparison = errors.New("missing argument for comparison") -) - -type kind int - -const ( - invalidKind kind = iota - boolKind - complexKind - intKind - floatKind - integerKind - stringKind - uintKind -) - -func basicKind(v reflect.Value) (kind, error) { - switch v.Kind() { - case reflect.Bool: - return boolKind, nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return intKind, nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return uintKind, nil - case reflect.Float32, reflect.Float64: - return floatKind, nil - case reflect.Complex64, reflect.Complex128: - return complexKind, nil - case reflect.String: - return stringKind, nil - } - return invalidKind, errBadComparisonType -} - -// eq evaluates the comparison a == b || a == c || ... -func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) { - v1 := reflect.ValueOf(arg1) - k1, err := basicKind(v1) - if err != nil { - return false, err - } - if len(arg2) == 0 { - return false, errNoComparison - } - for _, arg := range arg2 { - v2 := reflect.ValueOf(arg) - k2, err := basicKind(v2) - if err != nil { - return false, err - } - truth := false - if k1 != k2 { - // Special case: Can compare integer values regardless of type's sign. - switch { - case k1 == intKind && k2 == uintKind: - truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint() - case k1 == uintKind && k2 == intKind: - truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int()) - default: - return false, errBadComparison - } - } else { - switch k1 { - case boolKind: - truth = v1.Bool() == v2.Bool() - case complexKind: - truth = v1.Complex() == v2.Complex() - case floatKind: - truth = v1.Float() == v2.Float() - case intKind: - truth = v1.Int() == v2.Int() - case stringKind: - truth = v1.String() == v2.String() - case uintKind: - truth = v1.Uint() == v2.Uint() - default: - panic("invalid kind") - } - } - if truth { - return true, nil - } - } - return false, nil -} - -// ne evaluates the comparison a != b. -func ne(arg1, arg2 interface{}) (bool, error) { - // != is the inverse of ==. - equal, err := eq(arg1, arg2) - return !equal, err -} - -// lt evaluates the comparison a < b. 
-func lt(arg1, arg2 interface{}) (bool, error) {
- v1 := reflect.ValueOf(arg1)
- k1, err := basicKind(v1)
- if err != nil {
- return false, err
- }
- v2 := reflect.ValueOf(arg2)
- k2, err := basicKind(v2)
- if err != nil {
- return false, err
- }
- truth := false
- if k1 != k2 {
- // Special case: Can compare integer values regardless of type's sign.
- switch {
- case k1 == intKind && k2 == uintKind:
- truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint()
- case k1 == uintKind && k2 == intKind:
- truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int())
- default:
- return false, errBadComparison
- }
- } else {
- switch k1 {
- case boolKind, complexKind:
- return false, errBadComparisonType
- case floatKind:
- truth = v1.Float() < v2.Float()
- case intKind:
- truth = v1.Int() < v2.Int()
- case stringKind:
- truth = v1.String() < v2.String()
- case uintKind:
- truth = v1.Uint() < v2.Uint()
- default:
- panic("invalid kind")
- }
- }
- return truth, nil
-}
-
-// le evaluates the comparison <= b.
-func le(arg1, arg2 interface{}) (bool, error) {
- // <= is < or ==.
- lessThan, err := lt(arg1, arg2)
- if lessThan || err != nil {
- return lessThan, err
- }
- return eq(arg1, arg2)
-}
-
-// gt evaluates the comparison a > b.
-func gt(arg1, arg2 interface{}) (bool, error) {
- // > is the inverse of <=.
- lessOrEqual, err := le(arg1, arg2)
- if err != nil {
- return false, err
- }
- return !lessOrEqual, nil
-}
-
-// ge evaluates the comparison a >= b.
-func ge(arg1, arg2 interface{}) (bool, error) {
- // >= is the inverse of <.
- lessThan, err := lt(arg1, arg2)
- if err != nil {
- return false, err
- }
- return !lessThan, nil
-}
-
-// HTML escaping.
-
-var (
- htmlQuot = []byte("&#34;") // shorter than "&quot;"
- htmlApos = []byte("&#39;") // shorter than "&apos;" and apos was not in HTML until HTML5
- htmlAmp = []byte("&amp;")
- htmlLt = []byte("&lt;")
- htmlGt = []byte("&gt;")
-)
-
-// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
-func HTMLEscape(w io.Writer, b []byte) {
- last := 0
- for i, c := range b {
- var html []byte
- switch c {
- case '"':
- html = htmlQuot
- case '\'':
- html = htmlApos
- case '&':
- html = htmlAmp
- case '<':
- html = htmlLt
- case '>':
- html = htmlGt
- default:
- continue
- }
- w.Write(b[last:i])
- w.Write(html)
- last = i + 1
- }
- w.Write(b[last:])
-}
-
-// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
-func HTMLEscapeString(s string) string {
- // Avoid allocation if we can.
- if strings.IndexAny(s, `'"&<>`) < 0 {
- return s
- }
- var b bytes.Buffer
- HTMLEscape(&b, []byte(s))
- return b.String()
-}
-
-// HTMLEscaper returns the escaped HTML equivalent of the textual
-// representation of its arguments.
-func HTMLEscaper(args ...interface{}) string {
- return HTMLEscapeString(evalArgs(args))
-}
-
-// JavaScript escaping.
-
-var (
- jsLowUni = []byte(`\u00`)
- hex = []byte("0123456789ABCDEF")
-
- jsBackslash = []byte(`\\`)
- jsApos = []byte(`\'`)
- jsQuot = []byte(`\"`)
- jsLt = []byte(`\x3C`)
- jsGt = []byte(`\x3E`)
-)
-
-// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
-func JSEscape(w io.Writer, b []byte) {
- last := 0
- for i := 0; i < len(b); i++ {
- c := b[i]
-
- if !jsIsSpecial(rune(c)) {
- // fast path: nothing to do
- continue
- }
- w.Write(b[last:i])
-
- if c < utf8.RuneSelf {
- // Quotes, slashes and angle brackets get quoted.
- // Control characters get written as \u00XX.
- switch c { - case '\\': - w.Write(jsBackslash) - case '\'': - w.Write(jsApos) - case '"': - w.Write(jsQuot) - case '<': - w.Write(jsLt) - case '>': - w.Write(jsGt) - default: - w.Write(jsLowUni) - t, b := c>>4, c&0x0f - w.Write(hex[t : t+1]) - w.Write(hex[b : b+1]) - } - } else { - // Unicode rune. - r, size := utf8.DecodeRune(b[i:]) - if unicode.IsPrint(r) { - w.Write(b[i : i+size]) - } else { - fmt.Fprintf(w, "\\u%04X", r) - } - i += size - 1 - } - last = i + 1 - } - w.Write(b[last:]) -} - -// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s. -func JSEscapeString(s string) string { - // Avoid allocation if we can. - if strings.IndexFunc(s, jsIsSpecial) < 0 { - return s - } - var b bytes.Buffer - JSEscape(&b, []byte(s)) - return b.String() -} - -func jsIsSpecial(r rune) bool { - switch r { - case '\\', '\'', '"', '<', '>': - return true - } - return r < ' ' || utf8.RuneSelf <= r -} - -// JSEscaper returns the escaped JavaScript equivalent of the textual -// representation of its arguments. -func JSEscaper(args ...interface{}) string { - return JSEscapeString(evalArgs(args)) -} - -// URLQueryEscaper returns the escaped value of the textual representation of -// its arguments in a form suitable for embedding in a URL query. -func URLQueryEscaper(args ...interface{}) string { - return url.QueryEscape(evalArgs(args)) -} - -// evalArgs formats the list of arguments into a string. It is therefore equivalent to -// fmt.Sprint(args...) -// except that each argument is indirected (if a pointer), as required, -// using the same rules as the default string evaluation during template -// execution. -func evalArgs(args []interface{}) string { - ok := false - var s string - // Fast path for simple common case. - if len(args) == 1 { - s, ok = args[0].(string) - } - if !ok { - for i, arg := range args { - a, ok := printableValue(reflect.ValueOf(arg)) - if ok { - args[i] = a - } // else left fmt do its thing - } - s = fmt.Sprint(args...) - } - return s -} diff --git a/vendor/github.com/alecthomas/template/go.mod b/vendor/github.com/alecthomas/template/go.mod deleted file mode 100644 index a70670ae21c..00000000000 --- a/vendor/github.com/alecthomas/template/go.mod +++ /dev/null @@ -1 +0,0 @@ -module github.com/alecthomas/template diff --git a/vendor/github.com/alecthomas/template/helper.go b/vendor/github.com/alecthomas/template/helper.go deleted file mode 100644 index 3636fb54d69..00000000000 --- a/vendor/github.com/alecthomas/template/helper.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Helper functions to make constructing templates easier. - -package template - -import ( - "fmt" - "io/ioutil" - "path/filepath" -) - -// Functions and methods to parse templates. - -// Must is a helper that wraps a call to a function returning (*Template, error) -// and panics if the error is non-nil. It is intended for use in variable -// initializations such as -// var t = template.Must(template.New("name").Parse("text")) -func Must(t *Template, err error) *Template { - if err != nil { - panic(err) - } - return t -} - -// ParseFiles creates a new Template and parses the template definitions from -// the named files. The returned template's name will have the (base) name and -// (parsed) contents of the first file. There must be at least one file. -// If an error occurs, parsing stops and the returned *Template is nil. 
-func ParseFiles(filenames ...string) (*Template, error) { - return parseFiles(nil, filenames...) -} - -// ParseFiles parses the named files and associates the resulting templates with -// t. If an error occurs, parsing stops and the returned template is nil; -// otherwise it is t. There must be at least one file. -func (t *Template) ParseFiles(filenames ...string) (*Template, error) { - return parseFiles(t, filenames...) -} - -// parseFiles is the helper for the method and function. If the argument -// template is nil, it is created from the first file. -func parseFiles(t *Template, filenames ...string) (*Template, error) { - if len(filenames) == 0 { - // Not really a problem, but be consistent. - return nil, fmt.Errorf("template: no files named in call to ParseFiles") - } - for _, filename := range filenames { - b, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - s := string(b) - name := filepath.Base(filename) - // First template becomes return value if not already defined, - // and we use that one for subsequent New calls to associate - // all the templates together. Also, if this file has the same name - // as t, this file becomes the contents of t, so - // t, err := New(name).Funcs(xxx).ParseFiles(name) - // works. Otherwise we create a new template associated with t. - var tmpl *Template - if t == nil { - t = New(name) - } - if name == t.Name() { - tmpl = t - } else { - tmpl = t.New(name) - } - _, err = tmpl.Parse(s) - if err != nil { - return nil, err - } - } - return t, nil -} - -// ParseGlob creates a new Template and parses the template definitions from the -// files identified by the pattern, which must match at least one file. The -// returned template will have the (base) name and (parsed) contents of the -// first file matched by the pattern. ParseGlob is equivalent to calling -// ParseFiles with the list of files matched by the pattern. -func ParseGlob(pattern string) (*Template, error) { - return parseGlob(nil, pattern) -} - -// ParseGlob parses the template definitions in the files identified by the -// pattern and associates the resulting templates with t. The pattern is -// processed by filepath.Glob and must match at least one file. ParseGlob is -// equivalent to calling t.ParseFiles with the list of files matched by the -// pattern. -func (t *Template) ParseGlob(pattern string) (*Template, error) { - return parseGlob(t, pattern) -} - -// parseGlob is the implementation of the function and method ParseGlob. -func parseGlob(t *Template, pattern string) (*Template, error) { - filenames, err := filepath.Glob(pattern) - if err != nil { - return nil, err - } - if len(filenames) == 0 { - return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) - } - return parseFiles(t, filenames...) -} diff --git a/vendor/github.com/alecthomas/template/parse/lex.go b/vendor/github.com/alecthomas/template/parse/lex.go deleted file mode 100644 index 55f1c051e86..00000000000 --- a/vendor/github.com/alecthomas/template/parse/lex.go +++ /dev/null @@ -1,556 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package parse - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf8" -) - -// item represents a token or text string returned from the scanner. -type item struct { - typ itemType // The type of this item. - pos Pos // The starting position, in bytes, of this item in the input string. - val string // The value of this item. 
-} - -func (i item) String() string { - switch { - case i.typ == itemEOF: - return "EOF" - case i.typ == itemError: - return i.val - case i.typ > itemKeyword: - return fmt.Sprintf("<%s>", i.val) - case len(i.val) > 10: - return fmt.Sprintf("%.10q...", i.val) - } - return fmt.Sprintf("%q", i.val) -} - -// itemType identifies the type of lex items. -type itemType int - -const ( - itemError itemType = iota // error occurred; value is text of error - itemBool // boolean constant - itemChar // printable ASCII character; grab bag for comma etc. - itemCharConstant // character constant - itemComplex // complex constant (1+2i); imaginary is just a number - itemColonEquals // colon-equals (':=') introducing a declaration - itemEOF - itemField // alphanumeric identifier starting with '.' - itemIdentifier // alphanumeric identifier not starting with '.' - itemLeftDelim // left action delimiter - itemLeftParen // '(' inside action - itemNumber // simple number, including imaginary - itemPipe // pipe symbol - itemRawString // raw quoted string (includes quotes) - itemRightDelim // right action delimiter - itemElideNewline // elide newline after right delim - itemRightParen // ')' inside action - itemSpace // run of spaces separating arguments - itemString // quoted string (includes quotes) - itemText // plain text - itemVariable // variable starting with '$', such as '$' or '$1' or '$hello' - // Keywords appear after all the rest. - itemKeyword // used only to delimit the keywords - itemDot // the cursor, spelled '.' - itemDefine // define keyword - itemElse // else keyword - itemEnd // end keyword - itemIf // if keyword - itemNil // the untyped nil constant, easiest to treat as a keyword - itemRange // range keyword - itemTemplate // template keyword - itemWith // with keyword -) - -var key = map[string]itemType{ - ".": itemDot, - "define": itemDefine, - "else": itemElse, - "end": itemEnd, - "if": itemIf, - "range": itemRange, - "nil": itemNil, - "template": itemTemplate, - "with": itemWith, -} - -const eof = -1 - -// stateFn represents the state of the scanner as a function that returns the next state. -type stateFn func(*lexer) stateFn - -// lexer holds the state of the scanner. -type lexer struct { - name string // the name of the input; used only for error reports - input string // the string being scanned - leftDelim string // start of action - rightDelim string // end of action - state stateFn // the next lexing function to enter - pos Pos // current position in the input - start Pos // start position of this item - width Pos // width of last rune read from input - lastPos Pos // position of most recent item returned by nextItem - items chan item // channel of scanned items - parenDepth int // nesting depth of ( ) exprs -} - -// next returns the next rune in the input. -func (l *lexer) next() rune { - if int(l.pos) >= len(l.input) { - l.width = 0 - return eof - } - r, w := utf8.DecodeRuneInString(l.input[l.pos:]) - l.width = Pos(w) - l.pos += l.width - return r -} - -// peek returns but does not consume the next rune in the input. -func (l *lexer) peek() rune { - r := l.next() - l.backup() - return r -} - -// backup steps back one rune. Can only be called once per call of next. -func (l *lexer) backup() { - l.pos -= l.width -} - -// emit passes an item back to the client. -func (l *lexer) emit(t itemType) { - l.items <- item{t, l.start, l.input[l.start:l.pos]} - l.start = l.pos -} - -// ignore skips over the pending input before this point. 
-func (l *lexer) ignore() { - l.start = l.pos -} - -// accept consumes the next rune if it's from the valid set. -func (l *lexer) accept(valid string) bool { - if strings.IndexRune(valid, l.next()) >= 0 { - return true - } - l.backup() - return false -} - -// acceptRun consumes a run of runes from the valid set. -func (l *lexer) acceptRun(valid string) { - for strings.IndexRune(valid, l.next()) >= 0 { - } - l.backup() -} - -// lineNumber reports which line we're on, based on the position of -// the previous item returned by nextItem. Doing it this way -// means we don't have to worry about peek double counting. -func (l *lexer) lineNumber() int { - return 1 + strings.Count(l.input[:l.lastPos], "\n") -} - -// errorf returns an error token and terminates the scan by passing -// back a nil pointer that will be the next state, terminating l.nextItem. -func (l *lexer) errorf(format string, args ...interface{}) stateFn { - l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} - return nil -} - -// nextItem returns the next item from the input. -func (l *lexer) nextItem() item { - item := <-l.items - l.lastPos = item.pos - return item -} - -// lex creates a new scanner for the input string. -func lex(name, input, left, right string) *lexer { - if left == "" { - left = leftDelim - } - if right == "" { - right = rightDelim - } - l := &lexer{ - name: name, - input: input, - leftDelim: left, - rightDelim: right, - items: make(chan item), - } - go l.run() - return l -} - -// run runs the state machine for the lexer. -func (l *lexer) run() { - for l.state = lexText; l.state != nil; { - l.state = l.state(l) - } -} - -// state functions - -const ( - leftDelim = "{{" - rightDelim = "}}" - leftComment = "/*" - rightComment = "*/" -) - -// lexText scans until an opening action delimiter, "{{". -func lexText(l *lexer) stateFn { - for { - if strings.HasPrefix(l.input[l.pos:], l.leftDelim) { - if l.pos > l.start { - l.emit(itemText) - } - return lexLeftDelim - } - if l.next() == eof { - break - } - } - // Correctly reached EOF. - if l.pos > l.start { - l.emit(itemText) - } - l.emit(itemEOF) - return nil -} - -// lexLeftDelim scans the left delimiter, which is known to be present. -func lexLeftDelim(l *lexer) stateFn { - l.pos += Pos(len(l.leftDelim)) - if strings.HasPrefix(l.input[l.pos:], leftComment) { - return lexComment - } - l.emit(itemLeftDelim) - l.parenDepth = 0 - return lexInsideAction -} - -// lexComment scans a comment. The left comment marker is known to be present. -func lexComment(l *lexer) stateFn { - l.pos += Pos(len(leftComment)) - i := strings.Index(l.input[l.pos:], rightComment) - if i < 0 { - return l.errorf("unclosed comment") - } - l.pos += Pos(i + len(rightComment)) - if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) { - return l.errorf("comment ends before closing delimiter") - - } - l.pos += Pos(len(l.rightDelim)) - l.ignore() - return lexText -} - -// lexRightDelim scans the right delimiter, which is known to be present. -func lexRightDelim(l *lexer) stateFn { - l.pos += Pos(len(l.rightDelim)) - l.emit(itemRightDelim) - if l.peek() == '\\' { - l.pos++ - l.emit(itemElideNewline) - } - return lexText -} - -// lexInsideAction scans the elements inside action delimiters. -func lexInsideAction(l *lexer) stateFn { - // Either number, quoted string, or identifier. - // Spaces separate arguments; runs of spaces turn into itemSpace. - // Pipe symbols separate and are emitted. 
- if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) { - if l.parenDepth == 0 { - return lexRightDelim - } - return l.errorf("unclosed left paren") - } - switch r := l.next(); { - case r == eof || isEndOfLine(r): - return l.errorf("unclosed action") - case isSpace(r): - return lexSpace - case r == ':': - if l.next() != '=' { - return l.errorf("expected :=") - } - l.emit(itemColonEquals) - case r == '|': - l.emit(itemPipe) - case r == '"': - return lexQuote - case r == '`': - return lexRawQuote - case r == '$': - return lexVariable - case r == '\'': - return lexChar - case r == '.': - // special look-ahead for ".field" so we don't break l.backup(). - if l.pos < Pos(len(l.input)) { - r := l.input[l.pos] - if r < '0' || '9' < r { - return lexField - } - } - fallthrough // '.' can start a number. - case r == '+' || r == '-' || ('0' <= r && r <= '9'): - l.backup() - return lexNumber - case isAlphaNumeric(r): - l.backup() - return lexIdentifier - case r == '(': - l.emit(itemLeftParen) - l.parenDepth++ - return lexInsideAction - case r == ')': - l.emit(itemRightParen) - l.parenDepth-- - if l.parenDepth < 0 { - return l.errorf("unexpected right paren %#U", r) - } - return lexInsideAction - case r <= unicode.MaxASCII && unicode.IsPrint(r): - l.emit(itemChar) - return lexInsideAction - default: - return l.errorf("unrecognized character in action: %#U", r) - } - return lexInsideAction -} - -// lexSpace scans a run of space characters. -// One space has already been seen. -func lexSpace(l *lexer) stateFn { - for isSpace(l.peek()) { - l.next() - } - l.emit(itemSpace) - return lexInsideAction -} - -// lexIdentifier scans an alphanumeric. -func lexIdentifier(l *lexer) stateFn { -Loop: - for { - switch r := l.next(); { - case isAlphaNumeric(r): - // absorb. - default: - l.backup() - word := l.input[l.start:l.pos] - if !l.atTerminator() { - return l.errorf("bad character %#U", r) - } - switch { - case key[word] > itemKeyword: - l.emit(key[word]) - case word[0] == '.': - l.emit(itemField) - case word == "true", word == "false": - l.emit(itemBool) - default: - l.emit(itemIdentifier) - } - break Loop - } - } - return lexInsideAction -} - -// lexField scans a field: .Alphanumeric. -// The . has been scanned. -func lexField(l *lexer) stateFn { - return lexFieldOrVariable(l, itemField) -} - -// lexVariable scans a Variable: $Alphanumeric. -// The $ has been scanned. -func lexVariable(l *lexer) stateFn { - if l.atTerminator() { // Nothing interesting follows -> "$". - l.emit(itemVariable) - return lexInsideAction - } - return lexFieldOrVariable(l, itemVariable) -} - -// lexVariable scans a field or variable: [.$]Alphanumeric. -// The . or $ has been scanned. -func lexFieldOrVariable(l *lexer, typ itemType) stateFn { - if l.atTerminator() { // Nothing interesting follows -> "." or "$". - if typ == itemVariable { - l.emit(itemVariable) - } else { - l.emit(itemDot) - } - return lexInsideAction - } - var r rune - for { - r = l.next() - if !isAlphaNumeric(r) { - l.backup() - break - } - } - if !l.atTerminator() { - return l.errorf("bad character %#U", r) - } - l.emit(typ) - return lexInsideAction -} - -// atTerminator reports whether the input is at valid termination character to -// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases -// like "$x+2" not being acceptable without a space, in case we decide one -// day to implement arithmetic. 
-func (l *lexer) atTerminator() bool { - r := l.peek() - if isSpace(r) || isEndOfLine(r) { - return true - } - switch r { - case eof, '.', ',', '|', ':', ')', '(': - return true - } - // Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will - // succeed but should fail) but only in extremely rare cases caused by willfully - // bad choice of delimiter. - if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r { - return true - } - return false -} - -// lexChar scans a character constant. The initial quote is already -// scanned. Syntax checking is done by the parser. -func lexChar(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case '\\': - if r := l.next(); r != eof && r != '\n' { - break - } - fallthrough - case eof, '\n': - return l.errorf("unterminated character constant") - case '\'': - break Loop - } - } - l.emit(itemCharConstant) - return lexInsideAction -} - -// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This -// isn't a perfect number scanner - for instance it accepts "." and "0x0.2" -// and "089" - but when it's wrong the input is invalid and the parser (via -// strconv) will notice. -func lexNumber(l *lexer) stateFn { - if !l.scanNumber() { - return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) - } - if sign := l.peek(); sign == '+' || sign == '-' { - // Complex: 1+2i. No spaces, must end in 'i'. - if !l.scanNumber() || l.input[l.pos-1] != 'i' { - return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) - } - l.emit(itemComplex) - } else { - l.emit(itemNumber) - } - return lexInsideAction -} - -func (l *lexer) scanNumber() bool { - // Optional leading sign. - l.accept("+-") - // Is it hex? - digits := "0123456789" - if l.accept("0") && l.accept("xX") { - digits = "0123456789abcdefABCDEF" - } - l.acceptRun(digits) - if l.accept(".") { - l.acceptRun(digits) - } - if l.accept("eE") { - l.accept("+-") - l.acceptRun("0123456789") - } - // Is it imaginary? - l.accept("i") - // Next thing mustn't be alphanumeric. - if isAlphaNumeric(l.peek()) { - l.next() - return false - } - return true -} - -// lexQuote scans a quoted string. -func lexQuote(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case '\\': - if r := l.next(); r != eof && r != '\n' { - break - } - fallthrough - case eof, '\n': - return l.errorf("unterminated quoted string") - case '"': - break Loop - } - } - l.emit(itemString) - return lexInsideAction -} - -// lexRawQuote scans a raw quoted string. -func lexRawQuote(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case eof, '\n': - return l.errorf("unterminated raw quoted string") - case '`': - break Loop - } - } - l.emit(itemRawString) - return lexInsideAction -} - -// isSpace reports whether r is a space character. -func isSpace(r rune) bool { - return r == ' ' || r == '\t' -} - -// isEndOfLine reports whether r is an end-of-line character. -func isEndOfLine(r rune) bool { - return r == '\r' || r == '\n' -} - -// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore. -func isAlphaNumeric(r rune) bool { - return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) -} diff --git a/vendor/github.com/alecthomas/template/parse/node.go b/vendor/github.com/alecthomas/template/parse/node.go deleted file mode 100644 index 55c37f6dbac..00000000000 --- a/vendor/github.com/alecthomas/template/parse/node.go +++ /dev/null @@ -1,834 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Parse nodes. - -package parse - -import ( - "bytes" - "fmt" - "strconv" - "strings" -) - -var textFormat = "%s" // Changed to "%q" in tests for better error messages. - -// A Node is an element in the parse tree. The interface is trivial. -// The interface contains an unexported method so that only -// types local to this package can satisfy it. -type Node interface { - Type() NodeType - String() string - // Copy does a deep copy of the Node and all its components. - // To avoid type assertions, some XxxNodes also have specialized - // CopyXxx methods that return *XxxNode. - Copy() Node - Position() Pos // byte position of start of node in full original input string - // tree returns the containing *Tree. - // It is unexported so all implementations of Node are in this package. - tree() *Tree -} - -// NodeType identifies the type of a parse tree node. -type NodeType int - -// Pos represents a byte position in the original input text from which -// this template was parsed. -type Pos int - -func (p Pos) Position() Pos { - return p -} - -// Type returns itself and provides an easy default implementation -// for embedding in a Node. Embedded in all non-trivial Nodes. -func (t NodeType) Type() NodeType { - return t -} - -const ( - NodeText NodeType = iota // Plain text. - NodeAction // A non-control action such as a field evaluation. - NodeBool // A boolean constant. - NodeChain // A sequence of field accesses. - NodeCommand // An element of a pipeline. - NodeDot // The cursor, dot. - nodeElse // An else action. Not added to tree. - nodeEnd // An end action. Not added to tree. - NodeField // A field or method name. - NodeIdentifier // An identifier; always a function name. - NodeIf // An if action. - NodeList // A list of Nodes. - NodeNil // An untyped nil constant. - NodeNumber // A numerical constant. - NodePipe // A pipeline of commands. - NodeRange // A range action. - NodeString // A string constant. - NodeTemplate // A template invocation action. - NodeVariable // A $ variable. - NodeWith // A with action. -) - -// Nodes. - -// ListNode holds a sequence of nodes. -type ListNode struct { - NodeType - Pos - tr *Tree - Nodes []Node // The element nodes in lexical order. -} - -func (t *Tree) newList(pos Pos) *ListNode { - return &ListNode{tr: t, NodeType: NodeList, Pos: pos} -} - -func (l *ListNode) append(n Node) { - l.Nodes = append(l.Nodes, n) -} - -func (l *ListNode) tree() *Tree { - return l.tr -} - -func (l *ListNode) String() string { - b := new(bytes.Buffer) - for _, n := range l.Nodes { - fmt.Fprint(b, n) - } - return b.String() -} - -func (l *ListNode) CopyList() *ListNode { - if l == nil { - return l - } - n := l.tr.newList(l.Pos) - for _, elem := range l.Nodes { - n.append(elem.Copy()) - } - return n -} - -func (l *ListNode) Copy() Node { - return l.CopyList() -} - -// TextNode holds plain text. -type TextNode struct { - NodeType - Pos - tr *Tree - Text []byte // The text; may span newlines. 
-} - -func (t *Tree) newText(pos Pos, text string) *TextNode { - return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)} -} - -func (t *TextNode) String() string { - return fmt.Sprintf(textFormat, t.Text) -} - -func (t *TextNode) tree() *Tree { - return t.tr -} - -func (t *TextNode) Copy() Node { - return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)} -} - -// PipeNode holds a pipeline with optional declaration -type PipeNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Decl []*VariableNode // Variable declarations in lexical order. - Cmds []*CommandNode // The commands in lexical order. -} - -func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode { - return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl} -} - -func (p *PipeNode) append(command *CommandNode) { - p.Cmds = append(p.Cmds, command) -} - -func (p *PipeNode) String() string { - s := "" - if len(p.Decl) > 0 { - for i, v := range p.Decl { - if i > 0 { - s += ", " - } - s += v.String() - } - s += " := " - } - for i, c := range p.Cmds { - if i > 0 { - s += " | " - } - s += c.String() - } - return s -} - -func (p *PipeNode) tree() *Tree { - return p.tr -} - -func (p *PipeNode) CopyPipe() *PipeNode { - if p == nil { - return p - } - var decl []*VariableNode - for _, d := range p.Decl { - decl = append(decl, d.Copy().(*VariableNode)) - } - n := p.tr.newPipeline(p.Pos, p.Line, decl) - for _, c := range p.Cmds { - n.append(c.Copy().(*CommandNode)) - } - return n -} - -func (p *PipeNode) Copy() Node { - return p.CopyPipe() -} - -// ActionNode holds an action (something bounded by delimiters). -// Control actions have their own nodes; ActionNode represents simple -// ones such as field evaluations and parenthesized pipelines. -type ActionNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Pipe *PipeNode // The pipeline in the action. -} - -func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode { - return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe} -} - -func (a *ActionNode) String() string { - return fmt.Sprintf("{{%s}}", a.Pipe) - -} - -func (a *ActionNode) tree() *Tree { - return a.tr -} - -func (a *ActionNode) Copy() Node { - return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe()) - -} - -// CommandNode holds a command (a pipeline inside an evaluating action). -type CommandNode struct { - NodeType - Pos - tr *Tree - Args []Node // Arguments in lexical order: Identifier, field, or constant. -} - -func (t *Tree) newCommand(pos Pos) *CommandNode { - return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos} -} - -func (c *CommandNode) append(arg Node) { - c.Args = append(c.Args, arg) -} - -func (c *CommandNode) String() string { - s := "" - for i, arg := range c.Args { - if i > 0 { - s += " " - } - if arg, ok := arg.(*PipeNode); ok { - s += "(" + arg.String() + ")" - continue - } - s += arg.String() - } - return s -} - -func (c *CommandNode) tree() *Tree { - return c.tr -} - -func (c *CommandNode) Copy() Node { - if c == nil { - return c - } - n := c.tr.newCommand(c.Pos) - for _, c := range c.Args { - n.append(c.Copy()) - } - return n -} - -// IdentifierNode holds an identifier. -type IdentifierNode struct { - NodeType - Pos - tr *Tree - Ident string // The identifier's name. 
-} - -// NewIdentifier returns a new IdentifierNode with the given identifier name. -func NewIdentifier(ident string) *IdentifierNode { - return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident} -} - -// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature. -// Chained for convenience. -// TODO: fix one day? -func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode { - i.Pos = pos - return i -} - -// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature. -// Chained for convenience. -// TODO: fix one day? -func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode { - i.tr = t - return i -} - -func (i *IdentifierNode) String() string { - return i.Ident -} - -func (i *IdentifierNode) tree() *Tree { - return i.tr -} - -func (i *IdentifierNode) Copy() Node { - return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos) -} - -// VariableNode holds a list of variable names, possibly with chained field -// accesses. The dollar sign is part of the (first) name. -type VariableNode struct { - NodeType - Pos - tr *Tree - Ident []string // Variable name and fields in lexical order. -} - -func (t *Tree) newVariable(pos Pos, ident string) *VariableNode { - return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")} -} - -func (v *VariableNode) String() string { - s := "" - for i, id := range v.Ident { - if i > 0 { - s += "." - } - s += id - } - return s -} - -func (v *VariableNode) tree() *Tree { - return v.tr -} - -func (v *VariableNode) Copy() Node { - return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)} -} - -// DotNode holds the special identifier '.'. -type DotNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newDot(pos Pos) *DotNode { - return &DotNode{tr: t, NodeType: NodeDot, Pos: pos} -} - -func (d *DotNode) Type() NodeType { - // Override method on embedded NodeType for API compatibility. - // TODO: Not really a problem; could change API without effect but - // api tool complains. - return NodeDot -} - -func (d *DotNode) String() string { - return "." -} - -func (d *DotNode) tree() *Tree { - return d.tr -} - -func (d *DotNode) Copy() Node { - return d.tr.newDot(d.Pos) -} - -// NilNode holds the special identifier 'nil' representing an untyped nil constant. -type NilNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newNil(pos Pos) *NilNode { - return &NilNode{tr: t, NodeType: NodeNil, Pos: pos} -} - -func (n *NilNode) Type() NodeType { - // Override method on embedded NodeType for API compatibility. - // TODO: Not really a problem; could change API without effect but - // api tool complains. - return NodeNil -} - -func (n *NilNode) String() string { - return "nil" -} - -func (n *NilNode) tree() *Tree { - return n.tr -} - -func (n *NilNode) Copy() Node { - return n.tr.newNil(n.Pos) -} - -// FieldNode holds a field (identifier starting with '.'). -// The names may be chained ('.x.y'). -// The period is dropped from each ident. -type FieldNode struct { - NodeType - Pos - tr *Tree - Ident []string // The identifiers in lexical order. -} - -func (t *Tree) newField(pos Pos, ident string) *FieldNode { - return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period -} - -func (f *FieldNode) String() string { - s := "" - for _, id := range f.Ident { - s += "." 
+ id - } - return s -} - -func (f *FieldNode) tree() *Tree { - return f.tr -} - -func (f *FieldNode) Copy() Node { - return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)} -} - -// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.'). -// The names may be chained ('.x.y'). -// The periods are dropped from each ident. -type ChainNode struct { - NodeType - Pos - tr *Tree - Node Node - Field []string // The identifiers in lexical order. -} - -func (t *Tree) newChain(pos Pos, node Node) *ChainNode { - return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node} -} - -// Add adds the named field (which should start with a period) to the end of the chain. -func (c *ChainNode) Add(field string) { - if len(field) == 0 || field[0] != '.' { - panic("no dot in field") - } - field = field[1:] // Remove leading dot. - if field == "" { - panic("empty field") - } - c.Field = append(c.Field, field) -} - -func (c *ChainNode) String() string { - s := c.Node.String() - if _, ok := c.Node.(*PipeNode); ok { - s = "(" + s + ")" - } - for _, field := range c.Field { - s += "." + field - } - return s -} - -func (c *ChainNode) tree() *Tree { - return c.tr -} - -func (c *ChainNode) Copy() Node { - return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)} -} - -// BoolNode holds a boolean constant. -type BoolNode struct { - NodeType - Pos - tr *Tree - True bool // The value of the boolean constant. -} - -func (t *Tree) newBool(pos Pos, true bool) *BoolNode { - return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true} -} - -func (b *BoolNode) String() string { - if b.True { - return "true" - } - return "false" -} - -func (b *BoolNode) tree() *Tree { - return b.tr -} - -func (b *BoolNode) Copy() Node { - return b.tr.newBool(b.Pos, b.True) -} - -// NumberNode holds a number: signed or unsigned integer, float, or complex. -// The value is parsed and stored under all the types that can represent the value. -// This simulates in a small amount of code the behavior of Go's ideal constants. -type NumberNode struct { - NodeType - Pos - tr *Tree - IsInt bool // Number has an integral value. - IsUint bool // Number has an unsigned integral value. - IsFloat bool // Number has a floating-point value. - IsComplex bool // Number is complex. - Int64 int64 // The signed integer value. - Uint64 uint64 // The unsigned integer value. - Float64 float64 // The floating-point value. - Complex128 complex128 // The complex value. - Text string // The original textual representation from the input. -} - -func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) { - n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text} - switch typ { - case itemCharConstant: - rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0]) - if err != nil { - return nil, err - } - if tail != "'" { - return nil, fmt.Errorf("malformed character constant: %s", text) - } - n.Int64 = int64(rune) - n.IsInt = true - n.Uint64 = uint64(rune) - n.IsUint = true - n.Float64 = float64(rune) // odd but those are the rules. - n.IsFloat = true - return n, nil - case itemComplex: - // fmt.Sscan can parse the pair, so let it do the work. - if _, err := fmt.Sscan(text, &n.Complex128); err != nil { - return nil, err - } - n.IsComplex = true - n.simplifyComplex() - return n, nil - } - // Imaginary constants can only be complex unless they are zero. 
- if len(text) > 0 && text[len(text)-1] == 'i' { - f, err := strconv.ParseFloat(text[:len(text)-1], 64) - if err == nil { - n.IsComplex = true - n.Complex128 = complex(0, f) - n.simplifyComplex() - return n, nil - } - } - // Do integer test first so we get 0x123 etc. - u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below. - if err == nil { - n.IsUint = true - n.Uint64 = u - } - i, err := strconv.ParseInt(text, 0, 64) - if err == nil { - n.IsInt = true - n.Int64 = i - if i == 0 { - n.IsUint = true // in case of -0. - n.Uint64 = u - } - } - // If an integer extraction succeeded, promote the float. - if n.IsInt { - n.IsFloat = true - n.Float64 = float64(n.Int64) - } else if n.IsUint { - n.IsFloat = true - n.Float64 = float64(n.Uint64) - } else { - f, err := strconv.ParseFloat(text, 64) - if err == nil { - n.IsFloat = true - n.Float64 = f - // If a floating-point extraction succeeded, extract the int if needed. - if !n.IsInt && float64(int64(f)) == f { - n.IsInt = true - n.Int64 = int64(f) - } - if !n.IsUint && float64(uint64(f)) == f { - n.IsUint = true - n.Uint64 = uint64(f) - } - } - } - if !n.IsInt && !n.IsUint && !n.IsFloat { - return nil, fmt.Errorf("illegal number syntax: %q", text) - } - return n, nil -} - -// simplifyComplex pulls out any other types that are represented by the complex number. -// These all require that the imaginary part be zero. -func (n *NumberNode) simplifyComplex() { - n.IsFloat = imag(n.Complex128) == 0 - if n.IsFloat { - n.Float64 = real(n.Complex128) - n.IsInt = float64(int64(n.Float64)) == n.Float64 - if n.IsInt { - n.Int64 = int64(n.Float64) - } - n.IsUint = float64(uint64(n.Float64)) == n.Float64 - if n.IsUint { - n.Uint64 = uint64(n.Float64) - } - } -} - -func (n *NumberNode) String() string { - return n.Text -} - -func (n *NumberNode) tree() *Tree { - return n.tr -} - -func (n *NumberNode) Copy() Node { - nn := new(NumberNode) - *nn = *n // Easy, fast, correct. - return nn -} - -// StringNode holds a string constant. The value has been "unquoted". -type StringNode struct { - NodeType - Pos - tr *Tree - Quoted string // The original text of the string, with quotes. - Text string // The string, after quote processing. -} - -func (t *Tree) newString(pos Pos, orig, text string) *StringNode { - return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text} -} - -func (s *StringNode) String() string { - return s.Quoted -} - -func (s *StringNode) tree() *Tree { - return s.tr -} - -func (s *StringNode) Copy() Node { - return s.tr.newString(s.Pos, s.Quoted, s.Text) -} - -// endNode represents an {{end}} action. -// It does not appear in the final parse tree. -type endNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newEnd(pos Pos) *endNode { - return &endNode{tr: t, NodeType: nodeEnd, Pos: pos} -} - -func (e *endNode) String() string { - return "{{end}}" -} - -func (e *endNode) tree() *Tree { - return e.tr -} - -func (e *endNode) Copy() Node { - return e.tr.newEnd(e.Pos) -} - -// elseNode represents an {{else}} action. Does not appear in the final tree. 
-type elseNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) -} - -func (t *Tree) newElse(pos Pos, line int) *elseNode { - return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line} -} - -func (e *elseNode) Type() NodeType { - return nodeElse -} - -func (e *elseNode) String() string { - return "{{else}}" -} - -func (e *elseNode) tree() *Tree { - return e.tr -} - -func (e *elseNode) Copy() Node { - return e.tr.newElse(e.Pos, e.Line) -} - -// BranchNode is the common representation of if, range, and with. -type BranchNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Pipe *PipeNode // The pipeline to be evaluated. - List *ListNode // What to execute if the value is non-empty. - ElseList *ListNode // What to execute if the value is empty (nil if absent). -} - -func (b *BranchNode) String() string { - name := "" - switch b.NodeType { - case NodeIf: - name = "if" - case NodeRange: - name = "range" - case NodeWith: - name = "with" - default: - panic("unknown branch type") - } - if b.ElseList != nil { - return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList) - } - return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List) -} - -func (b *BranchNode) tree() *Tree { - return b.tr -} - -func (b *BranchNode) Copy() Node { - switch b.NodeType { - case NodeIf: - return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - case NodeRange: - return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - case NodeWith: - return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - default: - panic("unknown branch type") - } -} - -// IfNode represents an {{if}} action and its commands. -type IfNode struct { - BranchNode -} - -func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode { - return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (i *IfNode) Copy() Node { - return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList()) -} - -// RangeNode represents a {{range}} action and its commands. -type RangeNode struct { - BranchNode -} - -func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode { - return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (r *RangeNode) Copy() Node { - return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList()) -} - -// WithNode represents a {{with}} action and its commands. -type WithNode struct { - BranchNode -} - -func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode { - return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (w *WithNode) Copy() Node { - return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList()) -} - -// TemplateNode represents a {{template}} action. -type TemplateNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Name string // The name of the template (unquoted). - Pipe *PipeNode // The command to evaluate as dot for the template. 
-} - -func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode { - return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe} -} - -func (t *TemplateNode) String() string { - if t.Pipe == nil { - return fmt.Sprintf("{{template %q}}", t.Name) - } - return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe) -} - -func (t *TemplateNode) tree() *Tree { - return t.tr -} - -func (t *TemplateNode) Copy() Node { - return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe()) -} diff --git a/vendor/github.com/alecthomas/template/parse/parse.go b/vendor/github.com/alecthomas/template/parse/parse.go deleted file mode 100644 index 0d77ade8718..00000000000 --- a/vendor/github.com/alecthomas/template/parse/parse.go +++ /dev/null @@ -1,700 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package parse builds parse trees for templates as defined by text/template -// and html/template. Clients should use those packages to construct templates -// rather than this one, which provides shared internal data structures not -// intended for general use. -package parse - -import ( - "bytes" - "fmt" - "runtime" - "strconv" - "strings" -) - -// Tree is the representation of a single parsed template. -type Tree struct { - Name string // name of the template represented by the tree. - ParseName string // name of the top-level template during parsing, for error messages. - Root *ListNode // top-level root of the tree. - text string // text parsed to create the template (or its parent) - // Parsing only; cleared after parse. - funcs []map[string]interface{} - lex *lexer - token [3]item // three-token lookahead for parser. - peekCount int - vars []string // variables defined at the moment. -} - -// Copy returns a copy of the Tree. Any parsing state is discarded. -func (t *Tree) Copy() *Tree { - if t == nil { - return nil - } - return &Tree{ - Name: t.Name, - ParseName: t.ParseName, - Root: t.Root.CopyList(), - text: t.text, - } -} - -// Parse returns a map from template name to parse.Tree, created by parsing the -// templates described in the argument string. The top-level template will be -// given the specified name. If an error is encountered, parsing stops and an -// empty map is returned with the error. -func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) { - treeSet = make(map[string]*Tree) - t := New(name) - t.text = text - _, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...) - return -} - -// next returns the next token. -func (t *Tree) next() item { - if t.peekCount > 0 { - t.peekCount-- - } else { - t.token[0] = t.lex.nextItem() - } - return t.token[t.peekCount] -} - -// backup backs the input stream up one token. -func (t *Tree) backup() { - t.peekCount++ -} - -// backup2 backs the input stream up two tokens. -// The zeroth token is already there. -func (t *Tree) backup2(t1 item) { - t.token[1] = t1 - t.peekCount = 2 -} - -// backup3 backs the input stream up three tokens -// The zeroth token is already there. -func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back. - t.token[1] = t1 - t.token[2] = t2 - t.peekCount = 3 -} - -// peek returns but does not consume the next token. 
-func (t *Tree) peek() item { - if t.peekCount > 0 { - return t.token[t.peekCount-1] - } - t.peekCount = 1 - t.token[0] = t.lex.nextItem() - return t.token[0] -} - -// nextNonSpace returns the next non-space token. -func (t *Tree) nextNonSpace() (token item) { - for { - token = t.next() - if token.typ != itemSpace { - break - } - } - return token -} - -// peekNonSpace returns but does not consume the next non-space token. -func (t *Tree) peekNonSpace() (token item) { - for { - token = t.next() - if token.typ != itemSpace { - break - } - } - t.backup() - return token -} - -// Parsing. - -// New allocates a new parse tree with the given name. -func New(name string, funcs ...map[string]interface{}) *Tree { - return &Tree{ - Name: name, - funcs: funcs, - } -} - -// ErrorContext returns a textual representation of the location of the node in the input text. -// The receiver is only used when the node does not have a pointer to the tree inside, -// which can occur in old code. -func (t *Tree) ErrorContext(n Node) (location, context string) { - pos := int(n.Position()) - tree := n.tree() - if tree == nil { - tree = t - } - text := tree.text[:pos] - byteNum := strings.LastIndex(text, "\n") - if byteNum == -1 { - byteNum = pos // On first line. - } else { - byteNum++ // After the newline. - byteNum = pos - byteNum - } - lineNum := 1 + strings.Count(text, "\n") - context = n.String() - if len(context) > 20 { - context = fmt.Sprintf("%.20s...", context) - } - return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context -} - -// errorf formats the error and terminates processing. -func (t *Tree) errorf(format string, args ...interface{}) { - t.Root = nil - format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format) - panic(fmt.Errorf(format, args...)) -} - -// error terminates processing. -func (t *Tree) error(err error) { - t.errorf("%s", err) -} - -// expect consumes the next token and guarantees it has the required type. -func (t *Tree) expect(expected itemType, context string) item { - token := t.nextNonSpace() - if token.typ != expected { - t.unexpected(token, context) - } - return token -} - -// expectOneOf consumes the next token and guarantees it has one of the required types. -func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item { - token := t.nextNonSpace() - if token.typ != expected1 && token.typ != expected2 { - t.unexpected(token, context) - } - return token -} - -// unexpected complains about the token and terminates processing. -func (t *Tree) unexpected(token item, context string) { - t.errorf("unexpected %s in %s", token, context) -} - -// recover is the handler that turns panics into returns from the top level of Parse. -func (t *Tree) recover(errp *error) { - e := recover() - if e != nil { - if _, ok := e.(runtime.Error); ok { - panic(e) - } - if t != nil { - t.stopParse() - } - *errp = e.(error) - } - return -} - -// startParse initializes the parser, using the lexer. -func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) { - t.Root = nil - t.lex = lex - t.vars = []string{"$"} - t.funcs = funcs -} - -// stopParse terminates parsing. -func (t *Tree) stopParse() { - t.lex = nil - t.vars = nil - t.funcs = nil -} - -// Parse parses the template definition string to construct a representation of -// the template for execution. If either action delimiter string is empty, the -// default ("{{" or "}}") is used. Embedded template definitions are added to -// the treeSet map. 
-func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) { - defer t.recover(&err) - t.ParseName = t.Name - t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim)) - t.text = text - t.parse(treeSet) - t.add(treeSet) - t.stopParse() - return t, nil -} - -// add adds tree to the treeSet. -func (t *Tree) add(treeSet map[string]*Tree) { - tree := treeSet[t.Name] - if tree == nil || IsEmptyTree(tree.Root) { - treeSet[t.Name] = t - return - } - if !IsEmptyTree(t.Root) { - t.errorf("template: multiple definition of template %q", t.Name) - } -} - -// IsEmptyTree reports whether this tree (node) is empty of everything but space. -func IsEmptyTree(n Node) bool { - switch n := n.(type) { - case nil: - return true - case *ActionNode: - case *IfNode: - case *ListNode: - for _, node := range n.Nodes { - if !IsEmptyTree(node) { - return false - } - } - return true - case *RangeNode: - case *TemplateNode: - case *TextNode: - return len(bytes.TrimSpace(n.Text)) == 0 - case *WithNode: - default: - panic("unknown node: " + n.String()) - } - return false -} - -// parse is the top-level parser for a template, essentially the same -// as itemList except it also parses {{define}} actions. -// It runs to EOF. -func (t *Tree) parse(treeSet map[string]*Tree) (next Node) { - t.Root = t.newList(t.peek().pos) - for t.peek().typ != itemEOF { - if t.peek().typ == itemLeftDelim { - delim := t.next() - if t.nextNonSpace().typ == itemDefine { - newT := New("definition") // name will be updated once we know it. - newT.text = t.text - newT.ParseName = t.ParseName - newT.startParse(t.funcs, t.lex) - newT.parseDefinition(treeSet) - continue - } - t.backup2(delim) - } - n := t.textOrAction() - if n.Type() == nodeEnd { - t.errorf("unexpected %s", n) - } - t.Root.append(n) - } - return nil -} - -// parseDefinition parses a {{define}} ... {{end}} template definition and -// installs the definition in the treeSet map. The "define" keyword has already -// been scanned. -func (t *Tree) parseDefinition(treeSet map[string]*Tree) { - const context = "define clause" - name := t.expectOneOf(itemString, itemRawString, context) - var err error - t.Name, err = strconv.Unquote(name.val) - if err != nil { - t.error(err) - } - t.expect(itemRightDelim, context) - var end Node - t.Root, end = t.itemList() - if end.Type() != nodeEnd { - t.errorf("unexpected %s in %s", end, context) - } - t.add(treeSet) - t.stopParse() -} - -// itemList: -// textOrAction* -// Terminates at {{end}} or {{else}}, returned separately. -func (t *Tree) itemList() (list *ListNode, next Node) { - list = t.newList(t.peekNonSpace().pos) - for t.peekNonSpace().typ != itemEOF { - n := t.textOrAction() - switch n.Type() { - case nodeEnd, nodeElse: - return list, n - } - list.append(n) - } - t.errorf("unexpected EOF") - return -} - -// textOrAction: -// text | action -func (t *Tree) textOrAction() Node { - switch token := t.nextNonSpace(); token.typ { - case itemElideNewline: - return t.elideNewline() - case itemText: - return t.newText(token.pos, token.val) - case itemLeftDelim: - return t.action() - default: - t.unexpected(token, "input") - } - return nil -} - -// elideNewline: -// Remove newlines trailing rightDelim if \\ is present. 
-func (t *Tree) elideNewline() Node { - token := t.peek() - if token.typ != itemText { - t.unexpected(token, "input") - return nil - } - - t.next() - stripped := strings.TrimLeft(token.val, "\n\r") - diff := len(token.val) - len(stripped) - if diff > 0 { - // This is a bit nasty. We mutate the token in-place to remove - // preceding newlines. - token.pos += Pos(diff) - token.val = stripped - } - return t.newText(token.pos, token.val) -} - -// Action: -// control -// command ("|" command)* -// Left delim is past. Now get actions. -// First word could be a keyword such as range. -func (t *Tree) action() (n Node) { - switch token := t.nextNonSpace(); token.typ { - case itemElse: - return t.elseControl() - case itemEnd: - return t.endControl() - case itemIf: - return t.ifControl() - case itemRange: - return t.rangeControl() - case itemTemplate: - return t.templateControl() - case itemWith: - return t.withControl() - } - t.backup() - // Do not pop variables; they persist until "end". - return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command")) -} - -// Pipeline: -// declarations? command ('|' command)* -func (t *Tree) pipeline(context string) (pipe *PipeNode) { - var decl []*VariableNode - pos := t.peekNonSpace().pos - // Are there declarations? - for { - if v := t.peekNonSpace(); v.typ == itemVariable { - t.next() - // Since space is a token, we need 3-token look-ahead here in the worst case: - // in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an - // argument variable rather than a declaration. So remember the token - // adjacent to the variable so we can push it back if necessary. - tokenAfterVariable := t.peek() - if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") { - t.nextNonSpace() - variable := t.newVariable(v.pos, v.val) - decl = append(decl, variable) - t.vars = append(t.vars, v.val) - if next.typ == itemChar && next.val == "," { - if context == "range" && len(decl) < 2 { - continue - } - t.errorf("too many declarations in %s", context) - } - } else if tokenAfterVariable.typ == itemSpace { - t.backup3(v, tokenAfterVariable) - } else { - t.backup2(v) - } - } - break - } - pipe = t.newPipeline(pos, t.lex.lineNumber(), decl) - for { - switch token := t.nextNonSpace(); token.typ { - case itemRightDelim, itemRightParen: - if len(pipe.Cmds) == 0 { - t.errorf("missing value for %s", context) - } - if token.typ == itemRightParen { - t.backup() - } - return - case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier, - itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen: - t.backup() - pipe.append(t.command()) - default: - t.unexpected(token, context) - } - } -} - -func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) { - defer t.popVars(len(t.vars)) - line = t.lex.lineNumber() - pipe = t.pipeline(context) - var next Node - list, next = t.itemList() - switch next.Type() { - case nodeEnd: //done - case nodeElse: - if allowElseIf { - // Special case for "else if". If the "else" is followed immediately by an "if", - // the elseControl will have left the "if" token pending. Treat - // {{if a}}_{{else if b}}_{{end}} - // as - // {{if a}}_{{else}}{{if b}}_{{end}}{{end}}. - // To do this, parse the if as usual and stop at it {{end}}; the subsequent{{end}} - // is assumed. This technique works even for long if-else-if chains. - // TODO: Should we allow else-if in with and range? 
- if t.peek().typ == itemIf { - t.next() // Consume the "if" token. - elseList = t.newList(next.Position()) - elseList.append(t.ifControl()) - // Do not consume the next item - only one {{end}} required. - break - } - } - elseList, next = t.itemList() - if next.Type() != nodeEnd { - t.errorf("expected end; found %s", next) - } - } - return pipe.Position(), line, pipe, list, elseList -} - -// If: -// {{if pipeline}} itemList {{end}} -// {{if pipeline}} itemList {{else}} itemList {{end}} -// If keyword is past. -func (t *Tree) ifControl() Node { - return t.newIf(t.parseControl(true, "if")) -} - -// Range: -// {{range pipeline}} itemList {{end}} -// {{range pipeline}} itemList {{else}} itemList {{end}} -// Range keyword is past. -func (t *Tree) rangeControl() Node { - return t.newRange(t.parseControl(false, "range")) -} - -// With: -// {{with pipeline}} itemList {{end}} -// {{with pipeline}} itemList {{else}} itemList {{end}} -// If keyword is past. -func (t *Tree) withControl() Node { - return t.newWith(t.parseControl(false, "with")) -} - -// End: -// {{end}} -// End keyword is past. -func (t *Tree) endControl() Node { - return t.newEnd(t.expect(itemRightDelim, "end").pos) -} - -// Else: -// {{else}} -// Else keyword is past. -func (t *Tree) elseControl() Node { - // Special case for "else if". - peek := t.peekNonSpace() - if peek.typ == itemIf { - // We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ". - return t.newElse(peek.pos, t.lex.lineNumber()) - } - return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber()) -} - -// Template: -// {{template stringValue pipeline}} -// Template keyword is past. The name must be something that can evaluate -// to a string. -func (t *Tree) templateControl() Node { - var name string - token := t.nextNonSpace() - switch token.typ { - case itemString, itemRawString: - s, err := strconv.Unquote(token.val) - if err != nil { - t.error(err) - } - name = s - default: - t.unexpected(token, "template invocation") - } - var pipe *PipeNode - if t.nextNonSpace().typ != itemRightDelim { - t.backup() - // Do not pop variables; they persist until "end". - pipe = t.pipeline("template") - } - return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe) -} - -// command: -// operand (space operand)* -// space-separated arguments up to a pipeline character or right delimiter. -// we consume the pipe character but leave the right delim to terminate the action. -func (t *Tree) command() *CommandNode { - cmd := t.newCommand(t.peekNonSpace().pos) - for { - t.peekNonSpace() // skip leading spaces. - operand := t.operand() - if operand != nil { - cmd.append(operand) - } - switch token := t.next(); token.typ { - case itemSpace: - continue - case itemError: - t.errorf("%s", token.val) - case itemRightDelim, itemRightParen: - t.backup() - case itemPipe: - default: - t.errorf("unexpected %s in operand; missing space?", token) - } - break - } - if len(cmd.Args) == 0 { - t.errorf("empty command") - } - return cmd -} - -// operand: -// term .Field* -// An operand is a space-separated component of a command, -// a term possibly followed by field accesses. -// A nil return means the next item is not an operand. 
-func (t *Tree) operand() Node { - node := t.term() - if node == nil { - return nil - } - if t.peek().typ == itemField { - chain := t.newChain(t.peek().pos, node) - for t.peek().typ == itemField { - chain.Add(t.next().val) - } - // Compatibility with original API: If the term is of type NodeField - // or NodeVariable, just put more fields on the original. - // Otherwise, keep the Chain node. - // TODO: Switch to Chains always when we can. - switch node.Type() { - case NodeField: - node = t.newField(chain.Position(), chain.String()) - case NodeVariable: - node = t.newVariable(chain.Position(), chain.String()) - default: - node = chain - } - } - return node -} - -// term: -// literal (number, string, nil, boolean) -// function (identifier) -// . -// .Field -// $ -// '(' pipeline ')' -// A term is a simple "expression". -// A nil return means the next item is not a term. -func (t *Tree) term() Node { - switch token := t.nextNonSpace(); token.typ { - case itemError: - t.errorf("%s", token.val) - case itemIdentifier: - if !t.hasFunction(token.val) { - t.errorf("function %q not defined", token.val) - } - return NewIdentifier(token.val).SetTree(t).SetPos(token.pos) - case itemDot: - return t.newDot(token.pos) - case itemNil: - return t.newNil(token.pos) - case itemVariable: - return t.useVar(token.pos, token.val) - case itemField: - return t.newField(token.pos, token.val) - case itemBool: - return t.newBool(token.pos, token.val == "true") - case itemCharConstant, itemComplex, itemNumber: - number, err := t.newNumber(token.pos, token.val, token.typ) - if err != nil { - t.error(err) - } - return number - case itemLeftParen: - pipe := t.pipeline("parenthesized pipeline") - if token := t.next(); token.typ != itemRightParen { - t.errorf("unclosed right paren: unexpected %s", token) - } - return pipe - case itemString, itemRawString: - s, err := strconv.Unquote(token.val) - if err != nil { - t.error(err) - } - return t.newString(token.pos, token.val, s) - } - t.backup() - return nil -} - -// hasFunction reports if a function name exists in the Tree's maps. -func (t *Tree) hasFunction(name string) bool { - for _, funcMap := range t.funcs { - if funcMap == nil { - continue - } - if funcMap[name] != nil { - return true - } - } - return false -} - -// popVars trims the variable list to the specified length -func (t *Tree) popVars(n int) { - t.vars = t.vars[:n] -} - -// useVar returns a node for a variable reference. It errors if the -// variable is not defined. -func (t *Tree) useVar(pos Pos, name string) Node { - v := t.newVariable(pos, name) - for _, varName := range t.vars { - if varName == v.Ident[0] { - return v - } - } - t.errorf("undefined variable %q", v.Ident[0]) - return nil -} diff --git a/vendor/github.com/alecthomas/template/template.go b/vendor/github.com/alecthomas/template/template.go deleted file mode 100644 index 447ed2abaea..00000000000 --- a/vendor/github.com/alecthomas/template/template.go +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "fmt" - "reflect" - - "github.com/alecthomas/template/parse" -) - -// common holds the information shared by related templates. -type common struct { - tmpl map[string]*Template - // We use two maps, one for parsing and one for execution. - // This separation makes the API cleaner since it doesn't - // expose reflection to the client. 
- parseFuncs FuncMap - execFuncs map[string]reflect.Value -} - -// Template is the representation of a parsed template. The *parse.Tree -// field is exported only for use by html/template and should be treated -// as unexported by all other clients. -type Template struct { - name string - *parse.Tree - *common - leftDelim string - rightDelim string -} - -// New allocates a new template with the given name. -func New(name string) *Template { - return &Template{ - name: name, - } -} - -// Name returns the name of the template. -func (t *Template) Name() string { - return t.name -} - -// New allocates a new template associated with the given one and with the same -// delimiters. The association, which is transitive, allows one template to -// invoke another with a {{template}} action. -func (t *Template) New(name string) *Template { - t.init() - return &Template{ - name: name, - common: t.common, - leftDelim: t.leftDelim, - rightDelim: t.rightDelim, - } -} - -func (t *Template) init() { - if t.common == nil { - t.common = new(common) - t.tmpl = make(map[string]*Template) - t.parseFuncs = make(FuncMap) - t.execFuncs = make(map[string]reflect.Value) - } -} - -// Clone returns a duplicate of the template, including all associated -// templates. The actual representation is not copied, but the name space of -// associated templates is, so further calls to Parse in the copy will add -// templates to the copy but not to the original. Clone can be used to prepare -// common templates and use them with variant definitions for other templates -// by adding the variants after the clone is made. -func (t *Template) Clone() (*Template, error) { - nt := t.copy(nil) - nt.init() - nt.tmpl[t.name] = nt - for k, v := range t.tmpl { - if k == t.name { // Already installed. - continue - } - // The associated templates share nt's common structure. - tmpl := v.copy(nt.common) - nt.tmpl[k] = tmpl - } - for k, v := range t.parseFuncs { - nt.parseFuncs[k] = v - } - for k, v := range t.execFuncs { - nt.execFuncs[k] = v - } - return nt, nil -} - -// copy returns a shallow copy of t, with common set to the argument. -func (t *Template) copy(c *common) *Template { - nt := New(t.name) - nt.Tree = t.Tree - nt.common = c - nt.leftDelim = t.leftDelim - nt.rightDelim = t.rightDelim - return nt -} - -// AddParseTree creates a new template with the name and parse tree -// and associates it with t. -func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) { - if t.common != nil && t.tmpl[name] != nil { - return nil, fmt.Errorf("template: redefinition of template %q", name) - } - nt := t.New(name) - nt.Tree = tree - t.tmpl[name] = nt - return nt, nil -} - -// Templates returns a slice of the templates associated with t, including t -// itself. -func (t *Template) Templates() []*Template { - if t.common == nil { - return nil - } - // Return a slice so we don't expose the map. - m := make([]*Template, 0, len(t.tmpl)) - for _, v := range t.tmpl { - m = append(m, v) - } - return m -} - -// Delims sets the action delimiters to the specified strings, to be used in -// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template -// definitions will inherit the settings. An empty delimiter stands for the -// corresponding default: {{ or }}. -// The return value is the template, so calls can be chained. -func (t *Template) Delims(left, right string) *Template { - t.leftDelim = left - t.rightDelim = right - return t -} - -// Funcs adds the elements of the argument map to the template's function map. 
-// It panics if a value in the map is not a function with appropriate return -// type. However, it is legal to overwrite elements of the map. The return -// value is the template, so calls can be chained. -func (t *Template) Funcs(funcMap FuncMap) *Template { - t.init() - addValueFuncs(t.execFuncs, funcMap) - addFuncs(t.parseFuncs, funcMap) - return t -} - -// Lookup returns the template with the given name that is associated with t, -// or nil if there is no such template. -func (t *Template) Lookup(name string) *Template { - if t.common == nil { - return nil - } - return t.tmpl[name] -} - -// Parse parses a string into a template. Nested template definitions will be -// associated with the top-level template t. Parse may be called multiple times -// to parse definitions of templates to associate with t. It is an error if a -// resulting template is non-empty (contains content other than template -// definitions) and would replace a non-empty template with the same name. -// (In multiple calls to Parse with the same receiver template, only one call -// can contain text other than space, comments, and template definitions.) -func (t *Template) Parse(text string) (*Template, error) { - t.init() - trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins) - if err != nil { - return nil, err - } - // Add the newly parsed trees, including the one for t, into our common structure. - for name, tree := range trees { - // If the name we parsed is the name of this template, overwrite this template. - // The associate method checks it's not a redefinition. - tmpl := t - if name != t.name { - tmpl = t.New(name) - } - // Even if t == tmpl, we need to install it in the common.tmpl map. - if replace, err := t.associate(tmpl, tree); err != nil { - return nil, err - } else if replace { - tmpl.Tree = tree - } - tmpl.leftDelim = t.leftDelim - tmpl.rightDelim = t.rightDelim - } - return t, nil -} - -// associate installs the new template into the group of templates associated -// with t. It is an error to reuse a name except to overwrite an empty -// template. The two are already known to share the common structure. -// The boolean return value reports wither to store this tree as t.Tree. -func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) { - if new.common != t.common { - panic("internal error: associate not common") - } - name := new.name - if old := t.tmpl[name]; old != nil { - oldIsEmpty := parse.IsEmptyTree(old.Root) - newIsEmpty := parse.IsEmptyTree(tree.Root) - if newIsEmpty { - // Whether old is empty or not, new is empty; no reason to replace old. 
- return false, nil - } - if !oldIsEmpty { - return false, fmt.Errorf("template: redefinition of template %q", name) - } - } - t.tmpl[name] = new - return true, nil -} diff --git a/vendor/github.com/alecthomas/units/COPYING b/vendor/github.com/alecthomas/units/COPYING deleted file mode 100644 index 2993ec085d3..00000000000 --- a/vendor/github.com/alecthomas/units/COPYING +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014 Alec Thomas - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/alecthomas/units/README.md b/vendor/github.com/alecthomas/units/README.md deleted file mode 100644 index bee884e3c1c..00000000000 --- a/vendor/github.com/alecthomas/units/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Units - Helpful unit multipliers and functions for Go - -The goal of this package is to have functionality similar to the [time](http://golang.org/pkg/time/) package. - -It allows for code like this: - -```go -n, err := ParseBase2Bytes("1KB") -// n == 1024 -n = units.Mebibyte * 512 -``` diff --git a/vendor/github.com/alecthomas/units/bytes.go b/vendor/github.com/alecthomas/units/bytes.go deleted file mode 100644 index 61d0ca479ab..00000000000 --- a/vendor/github.com/alecthomas/units/bytes.go +++ /dev/null @@ -1,85 +0,0 @@ -package units - -// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte, -// etc.). -type Base2Bytes int64 - -// Base-2 byte units. -const ( - Kibibyte Base2Bytes = 1024 - KiB = Kibibyte - Mebibyte = Kibibyte * 1024 - MiB = Mebibyte - Gibibyte = Mebibyte * 1024 - GiB = Gibibyte - Tebibyte = Gibibyte * 1024 - TiB = Tebibyte - Pebibyte = Tebibyte * 1024 - PiB = Pebibyte - Exbibyte = Pebibyte * 1024 - EiB = Exbibyte -) - -var ( - bytesUnitMap = MakeUnitMap("iB", "B", 1024) - oldBytesUnitMap = MakeUnitMap("B", "B", 1024) -) - -// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB -// and KiB are both 1024. -// However "kB", which is the correct SI spelling of 1000 Bytes, is rejected. -func ParseBase2Bytes(s string) (Base2Bytes, error) { - n, err := ParseUnit(s, bytesUnitMap) - if err != nil { - n, err = ParseUnit(s, oldBytesUnitMap) - } - return Base2Bytes(n), err -} - -func (b Base2Bytes) String() string { - return ToString(int64(b), 1024, "iB", "B") -} - -var ( - metricBytesUnitMap = MakeUnitMap("B", "B", 1000) -) - -// MetricBytes are SI byte units (1000 bytes in a kilobyte). -type MetricBytes SI - -// SI base-10 byte units. 
-const ( - Kilobyte MetricBytes = 1000 - KB = Kilobyte - Megabyte = Kilobyte * 1000 - MB = Megabyte - Gigabyte = Megabyte * 1000 - GB = Gigabyte - Terabyte = Gigabyte * 1000 - TB = Terabyte - Petabyte = Terabyte * 1000 - PB = Petabyte - Exabyte = Petabyte * 1000 - EB = Exabyte -) - -// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes. -func ParseMetricBytes(s string) (MetricBytes, error) { - n, err := ParseUnit(s, metricBytesUnitMap) - return MetricBytes(n), err -} - -// TODO: represents 1000B as uppercase "KB", while SI standard requires "kB". -func (m MetricBytes) String() string { - return ToString(int64(m), 1000, "B", "B") -} - -// ParseStrictBytes supports both iB and B suffixes for base 2 and metric, -// respectively. That is, KiB represents 1024 and kB, KB represent 1000. -func ParseStrictBytes(s string) (int64, error) { - n, err := ParseUnit(s, bytesUnitMap) - if err != nil { - n, err = ParseUnit(s, metricBytesUnitMap) - } - return int64(n), err -} diff --git a/vendor/github.com/alecthomas/units/doc.go b/vendor/github.com/alecthomas/units/doc.go deleted file mode 100644 index 156ae386723..00000000000 --- a/vendor/github.com/alecthomas/units/doc.go +++ /dev/null @@ -1,13 +0,0 @@ -// Package units provides helpful unit multipliers and functions for Go. -// -// The goal of this package is to have functionality similar to the time [1] package. -// -// -// [1] http://golang.org/pkg/time/ -// -// It allows for code like this: -// -// n, err := ParseBase2Bytes("1KB") -// // n == 1024 -// n = units.Mebibyte * 512 -package units diff --git a/vendor/github.com/alecthomas/units/go.mod b/vendor/github.com/alecthomas/units/go.mod deleted file mode 100644 index c7fb91f2b27..00000000000 --- a/vendor/github.com/alecthomas/units/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/alecthomas/units - -require github.com/stretchr/testify v1.4.0 diff --git a/vendor/github.com/alecthomas/units/go.sum b/vendor/github.com/alecthomas/units/go.sum deleted file mode 100644 index 8fdee5854f1..00000000000 --- a/vendor/github.com/alecthomas/units/go.sum +++ /dev/null @@ -1,11 +0,0 @@ -github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/alecthomas/units/si.go b/vendor/github.com/alecthomas/units/si.go deleted file mode 100644 index 99b2fa4fcb0..00000000000 --- a/vendor/github.com/alecthomas/units/si.go +++ /dev/null @@ -1,50 +0,0 @@ -package units - -// SI units. -type SI int64 - -// SI unit multiples. 
-const ( - Kilo SI = 1000 - Mega = Kilo * 1000 - Giga = Mega * 1000 - Tera = Giga * 1000 - Peta = Tera * 1000 - Exa = Peta * 1000 -) - -func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 { - res := map[string]float64{ - shortSuffix: 1, - // see below for "k" / "K" - "M" + suffix: float64(scale * scale), - "G" + suffix: float64(scale * scale * scale), - "T" + suffix: float64(scale * scale * scale * scale), - "P" + suffix: float64(scale * scale * scale * scale * scale), - "E" + suffix: float64(scale * scale * scale * scale * scale * scale), - } - - // Standard SI prefixes use lowercase "k" for kilo = 1000. - // For compatibility, and to be fool-proof, we accept both "k" and "K" in metric mode. - // - // However, official binary prefixes are always capitalized - "KiB" - - // and we specifically never parse "kB" as 1024B because: - // - // (1) people pedantic enough to use lowercase according to SI unlikely to abuse "k" to mean 1024 :-) - // - // (2) Use of capital K for 1024 was an informal tradition predating IEC prefixes: - // "The binary meaning of the kilobyte for 1024 bytes typically uses the symbol KB, with an - // uppercase letter K." - // -- https://en.wikipedia.org/wiki/Kilobyte#Base_2_(1024_bytes) - // "Capitalization of the letter K became the de facto standard for binary notation, although this - // could not be extended to higher powers, and use of the lowercase k did persist.[13][14][15]" - // -- https://en.wikipedia.org/wiki/Binary_prefix#History - // See also the extensive https://en.wikipedia.org/wiki/Timeline_of_binary_prefixes. - if scale == 1024 { - res["K"+suffix] = float64(scale) - } else { - res["k"+suffix] = float64(scale) - res["K"+suffix] = float64(scale) - } - return res -} diff --git a/vendor/github.com/alecthomas/units/util.go b/vendor/github.com/alecthomas/units/util.go deleted file mode 100644 index 6527e92d164..00000000000 --- a/vendor/github.com/alecthomas/units/util.go +++ /dev/null @@ -1,138 +0,0 @@ -package units - -import ( - "errors" - "fmt" - "strings" -) - -var ( - siUnits = []string{"", "K", "M", "G", "T", "P", "E"} -) - -func ToString(n int64, scale int64, suffix, baseSuffix string) string { - mn := len(siUnits) - out := make([]string, mn) - for i, m := range siUnits { - if n%scale != 0 || i == 0 && n == 0 { - s := suffix - if i == 0 { - s = baseSuffix - } - out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s) - } - n /= scale - if n == 0 { - break - } - } - return strings.Join(out, "") -} - -// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123 -var errLeadingInt = errors.New("units: bad [0-9]*") // never printed - -// leadingInt consumes the leading [0-9]* from s. -func leadingInt(s string) (x int64, rem string, err error) { - i := 0 - for ; i < len(s); i++ { - c := s[i] - if c < '0' || c > '9' { - break - } - if x >= (1<<63-10)/10 { - // overflow - return 0, "", errLeadingInt - } - x = x*10 + int64(c) - '0' - } - return x, s[i:], nil -} - -func ParseUnit(s string, unitMap map[string]float64) (int64, error) { - // [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+ - orig := s - f := float64(0) - neg := false - - // Consume [-+]? - if s != "" { - c := s[0] - if c == '-' || c == '+' { - neg = c == '-' - s = s[1:] - } - } - // Special case: if all that is left is "0", this is zero. 
- if s == "0" { - return 0, nil - } - if s == "" { - return 0, errors.New("units: invalid " + orig) - } - for s != "" { - g := float64(0) // this element of the sequence - - var x int64 - var err error - - // The next character must be [0-9.] - if !(s[0] == '.' || ('0' <= s[0] && s[0] <= '9')) { - return 0, errors.New("units: invalid " + orig) - } - // Consume [0-9]* - pl := len(s) - x, s, err = leadingInt(s) - if err != nil { - return 0, errors.New("units: invalid " + orig) - } - g = float64(x) - pre := pl != len(s) // whether we consumed anything before a period - - // Consume (\.[0-9]*)? - post := false - if s != "" && s[0] == '.' { - s = s[1:] - pl := len(s) - x, s, err = leadingInt(s) - if err != nil { - return 0, errors.New("units: invalid " + orig) - } - scale := 1.0 - for n := pl - len(s); n > 0; n-- { - scale *= 10 - } - g += float64(x) / scale - post = pl != len(s) - } - if !pre && !post { - // no digits (e.g. ".s" or "-.s") - return 0, errors.New("units: invalid " + orig) - } - - // Consume unit. - i := 0 - for ; i < len(s); i++ { - c := s[i] - if c == '.' || ('0' <= c && c <= '9') { - break - } - } - u := s[:i] - s = s[i:] - unit, ok := unitMap[u] - if !ok { - return 0, errors.New("units: unknown unit " + u + " in " + orig) - } - - f += g * unit - } - - if neg { - f = -f - } - if f < float64(-1<<63) || f > float64(1<<63-1) { - return 0, errors.New("units: overflow parsing unit") - } - return int64(f), nil -} diff --git a/vendor/github.com/andybalholm/brotli/LICENSE b/vendor/github.com/andybalholm/brotli/LICENSE deleted file mode 100644 index 33b7cdd2dba..00000000000 --- a/vendor/github.com/andybalholm/brotli/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/andybalholm/brotli/README.md b/vendor/github.com/andybalholm/brotli/README.md deleted file mode 100644 index 02d115e55a3..00000000000 --- a/vendor/github.com/andybalholm/brotli/README.md +++ /dev/null @@ -1,5 +0,0 @@ -This package is a brotli compressor and decompressor implemented in Go. -It was translated from the reference implementation (https://github.com/google/brotli) -with the `c2go` tool at https://github.com/andybalholm/c2go. - -I am using it in production with https://github.com/andybalholm/redwood. 
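Aside, for reviewers unfamiliar with the dependency being un-vendored below: github.com/andybalholm/brotli exposes a compress/gzip-style streaming API. The following round-trip sketch is illustrative only and not part of this patch; it assumes the package's NewWriter/NewReader entry points and uses ioutil.ReadAll to match the Go version targeted by this build.

```go
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"

	"github.com/andybalholm/brotli" // assumed import path of the dependency removed from vendor/
)

func main() {
	var compressed bytes.Buffer

	// Compress: brotli.Writer wraps any io.Writer.
	w := brotli.NewWriter(&compressed)
	if _, err := w.Write([]byte("hello, brotli")); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil { // Close flushes the final block.
		panic(err)
	}

	// Decompress: brotli.Reader wraps any io.Reader.
	r := brotli.NewReader(&compressed)
	out, err := ioutil.ReadAll(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // prints "hello, brotli"
}
```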
diff --git a/vendor/github.com/andybalholm/brotli/backward_references.go b/vendor/github.com/andybalholm/brotli/backward_references.go deleted file mode 100644 index 1642e471ad0..00000000000 --- a/vendor/github.com/andybalholm/brotli/backward_references.go +++ /dev/null @@ -1,177 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function to find backward reference copies. */ - -func computeDistanceCode(distance uint, max_distance uint, dist_cache []int) uint { - if distance <= max_distance { - var distance_plus_3 uint = distance + 3 - var offset0 uint = distance_plus_3 - uint(dist_cache[0]) - var offset1 uint = distance_plus_3 - uint(dist_cache[1]) - if distance == uint(dist_cache[0]) { - return 0 - } else if distance == uint(dist_cache[1]) { - return 1 - } else if offset0 < 7 { - return (0x9750468 >> (4 * offset0)) & 0xF - } else if offset1 < 7 { - return (0xFDB1ACE >> (4 * offset1)) & 0xF - } else if distance == uint(dist_cache[2]) { - return 2 - } else if distance == uint(dist_cache[3]) { - return 3 - } - } - - return distance + numDistanceShortCodes - 1 -} - -/* "commands" points to the next output command to write to, "*num_commands" is - initially the total amount of commands output by previous - CreateBackwardReferences calls, and must be incremented by the amount written - by this call. */ -func createBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands []command, num_commands *uint, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var orig_commands []command = commands - var insert_length uint = *last_insert_len - var pos_end uint = position + num_bytes - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var random_heuristics_window_size uint = literalSpreeLengthForSparseSearch(params) - var apply_random_heuristics uint = position + random_heuristics_window_size - var gap uint = 0 - /* Set maximum distance, see section 9.1. of the spec. */ - - const kMinScore uint = scoreBase + 100 - - /* For speed up heuristics for random data. */ - - /* Minimum score to accept a backward reference. */ - hasher.PrepareDistanceCache(dist_cache) - var sr2 hasherSearchResult - var sr hasherSearchResult - - for position+hasher.HashTypeLength() < pos_end { - var max_length uint = pos_end - position - var max_distance uint = brotli_min_size_t(position, max_backward_limit) - sr.len = 0 - sr.len_code_delta = 0 - sr.distance = 0 - sr.score = kMinScore - hasher.FindLongestMatch(¶ms.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr) - if sr.score > kMinScore { - /* Found a match. Let's look for something even better ahead. 
*/ - var delayed_backward_references_in_row int = 0 - max_length-- - for ; ; max_length-- { - var cost_diff_lazy uint = 175 - if params.quality < minQualityForExtensiveReferenceSearch { - sr2.len = brotli_min_size_t(sr.len-1, max_length) - } else { - sr2.len = 0 - } - sr2.len_code_delta = 0 - sr2.distance = 0 - sr2.score = kMinScore - max_distance = brotli_min_size_t(position+1, max_backward_limit) - hasher.FindLongestMatch(¶ms.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2) - if sr2.score >= sr.score+cost_diff_lazy { - /* Ok, let's just write one byte for now and start a match from the - next byte. */ - position++ - - insert_length++ - sr = sr2 - delayed_backward_references_in_row++ - if delayed_backward_references_in_row < 4 && position+hasher.HashTypeLength() < pos_end { - continue - } - } - - break - } - - apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size - max_distance = brotli_min_size_t(position, max_backward_limit) - { - /* The first 16 codes are special short-codes, - and the minimum offset is 1. */ - var distance_code uint = computeDistanceCode(sr.distance, max_distance+gap, dist_cache) - if (sr.distance <= (max_distance + gap)) && distance_code > 0 { - dist_cache[3] = dist_cache[2] - dist_cache[2] = dist_cache[1] - dist_cache[1] = dist_cache[0] - dist_cache[0] = int(sr.distance) - hasher.PrepareDistanceCache(dist_cache) - } - - initCommand(&commands[0], ¶ms.dist, insert_length, sr.len, sr.len_code_delta, distance_code) - commands = commands[1:] - } - - *num_literals += insert_length - insert_length = 0 - /* Put the hash keys into the table, if there are enough bytes left. - Depending on the hasher implementation, it can push all positions - in the given range or only a subset of them. - Avoid hash poisoning with RLE data. */ - { - var range_start uint = position + 2 - var range_end uint = brotli_min_size_t(position+sr.len, store_end) - if sr.distance < sr.len>>2 { - range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2))) - } - - hasher.StoreRange(ringbuffer, ringbuffer_mask, range_start, range_end) - } - - position += sr.len - } else { - insert_length++ - position++ - - /* If we have not seen matches for a long time, we can skip some - match lookups. Unsuccessful match lookups are very very expensive - and this kind of a heuristic speeds up compression quite - a lot. */ - if position > apply_random_heuristics { - /* Going through uncompressible data, jump. */ - if position > apply_random_heuristics+4*random_heuristics_window_size { - var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 4) - /* It is quite a long time since we saw a copy, so we assume - that this data is not compressible, and store hashes less - often. Hashes of non compressible data are less likely to - turn out to be useful in the future, too, so we store less of - them to not to flood out the hash table of good compressible - data. 
*/ - - var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin) - for ; position < pos_jump; position += 4 { - hasher.Store(ringbuffer, ringbuffer_mask, position) - insert_length += 4 - } - } else { - var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 2) - var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin) - for ; position < pos_jump; position += 2 { - hasher.Store(ringbuffer, ringbuffer_mask, position) - insert_length += 2 - } - } - } - } - } - - insert_length += pos_end - position - *last_insert_len = insert_length - *num_commands += uint(-cap(commands) + cap(orig_commands)) -} diff --git a/vendor/github.com/andybalholm/brotli/backward_references_hq.go b/vendor/github.com/andybalholm/brotli/backward_references_hq.go deleted file mode 100644 index 5eac736133f..00000000000 --- a/vendor/github.com/andybalholm/brotli/backward_references_hq.go +++ /dev/null @@ -1,795 +0,0 @@ -package brotli - -import "math" - -type zopfliNode struct { - length uint32 - distance uint32 - dcode_insert_length uint32 - u struct { - cost float32 - next uint32 - shortcut uint32 - } -} - -const maxEffectiveDistanceAlphabetSize = 544 - -const kInfinity float32 = 1.7e38 /* ~= 2 ^ 127 */ - -var kDistanceCacheIndex = []uint32{0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1} - -var kDistanceCacheOffset = []int{0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3} - -func initZopfliNodes(array []zopfliNode, length uint) { - var stub zopfliNode - var i uint - stub.length = 1 - stub.distance = 0 - stub.dcode_insert_length = 0 - stub.u.cost = kInfinity - for i = 0; i < length; i++ { - array[i] = stub - } -} - -func zopfliNodeCopyLength(self *zopfliNode) uint32 { - return self.length & 0x1FFFFFF -} - -func zopfliNodeLengthCode(self *zopfliNode) uint32 { - var modifier uint32 = self.length >> 25 - return zopfliNodeCopyLength(self) + 9 - modifier -} - -func zopfliNodeCopyDistance(self *zopfliNode) uint32 { - return self.distance -} - -func zopfliNodeDistanceCode(self *zopfliNode) uint32 { - var short_code uint32 = self.dcode_insert_length >> 27 - if short_code == 0 { - return zopfliNodeCopyDistance(self) + numDistanceShortCodes - 1 - } else { - return short_code - 1 - } -} - -func zopfliNodeCommandLength(self *zopfliNode) uint32 { - return zopfliNodeCopyLength(self) + (self.dcode_insert_length & 0x7FFFFFF) -} - -/* Histogram based cost model for zopflification. 
*/ -type zopfliCostModel struct { - cost_cmd_ [numCommandSymbols]float32 - cost_dist_ []float32 - distance_histogram_size uint32 - literal_costs_ []float32 - min_cost_cmd_ float32 - num_bytes_ uint -} - -func initZopfliCostModel(self *zopfliCostModel, dist *distanceParams, num_bytes uint) { - var distance_histogram_size uint32 = dist.alphabet_size - if distance_histogram_size > maxEffectiveDistanceAlphabetSize { - distance_histogram_size = maxEffectiveDistanceAlphabetSize - } - - self.num_bytes_ = num_bytes - self.literal_costs_ = make([]float32, (num_bytes + 2)) - self.cost_dist_ = make([]float32, (dist.alphabet_size)) - self.distance_histogram_size = distance_histogram_size -} - -func cleanupZopfliCostModel(self *zopfliCostModel) { - self.literal_costs_ = nil - self.cost_dist_ = nil -} - -func setCost(histogram []uint32, histogram_size uint, literal_histogram bool, cost []float32) { - var sum uint = 0 - var missing_symbol_sum uint - var log2sum float32 - var missing_symbol_cost float32 - var i uint - for i = 0; i < histogram_size; i++ { - sum += uint(histogram[i]) - } - - log2sum = float32(fastLog2(sum)) - missing_symbol_sum = sum - if !literal_histogram { - for i = 0; i < histogram_size; i++ { - if histogram[i] == 0 { - missing_symbol_sum++ - } - } - } - - missing_symbol_cost = float32(fastLog2(missing_symbol_sum)) + 2 - for i = 0; i < histogram_size; i++ { - if histogram[i] == 0 { - cost[i] = missing_symbol_cost - continue - } - - /* Shannon bits for this symbol. */ - cost[i] = log2sum - float32(fastLog2(uint(histogram[i]))) - - /* Cannot be coded with less than 1 bit */ - if cost[i] < 1 { - cost[i] = 1 - } - } -} - -func zopfliCostModelSetFromCommands(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint, commands []command, num_commands uint, last_insert_len uint) { - var histogram_literal [numLiteralSymbols]uint32 - var histogram_cmd [numCommandSymbols]uint32 - var histogram_dist [maxEffectiveDistanceAlphabetSize]uint32 - var cost_literal [numLiteralSymbols]float32 - var pos uint = position - last_insert_len - var min_cost_cmd float32 = kInfinity - var i uint - var cost_cmd []float32 = self.cost_cmd_[:] - var literal_costs []float32 - - histogram_literal = [numLiteralSymbols]uint32{} - histogram_cmd = [numCommandSymbols]uint32{} - histogram_dist = [maxEffectiveDistanceAlphabetSize]uint32{} - - for i = 0; i < num_commands; i++ { - var inslength uint = uint(commands[i].insert_len_) - var copylength uint = uint(commandCopyLen(&commands[i])) - var distcode uint = uint(commands[i].dist_prefix_) & 0x3FF - var cmdcode uint = uint(commands[i].cmd_prefix_) - var j uint - - histogram_cmd[cmdcode]++ - if cmdcode >= 128 { - histogram_dist[distcode]++ - } - - for j = 0; j < inslength; j++ { - histogram_literal[ringbuffer[(pos+j)&ringbuffer_mask]]++ - } - - pos += inslength + copylength - } - - setCost(histogram_literal[:], numLiteralSymbols, true, cost_literal[:]) - setCost(histogram_cmd[:], numCommandSymbols, false, cost_cmd) - setCost(histogram_dist[:], uint(self.distance_histogram_size), false, self.cost_dist_) - - for i = 0; i < numCommandSymbols; i++ { - min_cost_cmd = brotli_min_float(min_cost_cmd, cost_cmd[i]) - } - - self.min_cost_cmd_ = min_cost_cmd - { - literal_costs = self.literal_costs_ - var literal_carry float32 = 0.0 - var num_bytes uint = self.num_bytes_ - literal_costs[0] = 0.0 - for i = 0; i < num_bytes; i++ { - literal_carry += cost_literal[ringbuffer[(position+i)&ringbuffer_mask]] - literal_costs[i+1] = literal_costs[i] + literal_carry - literal_carry 
-= literal_costs[i+1] - literal_costs[i] - } - } -} - -func zopfliCostModelSetFromLiteralCosts(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint) { - var literal_costs []float32 = self.literal_costs_ - var literal_carry float32 = 0.0 - var cost_dist []float32 = self.cost_dist_ - var cost_cmd []float32 = self.cost_cmd_[:] - var num_bytes uint = self.num_bytes_ - var i uint - estimateBitCostsForLiterals(position, num_bytes, ringbuffer_mask, ringbuffer, literal_costs[1:]) - literal_costs[0] = 0.0 - for i = 0; i < num_bytes; i++ { - literal_carry += literal_costs[i+1] - literal_costs[i+1] = literal_costs[i] + literal_carry - literal_carry -= literal_costs[i+1] - literal_costs[i] - } - - for i = 0; i < numCommandSymbols; i++ { - cost_cmd[i] = float32(fastLog2(uint(11 + uint32(i)))) - } - - for i = 0; uint32(i) < self.distance_histogram_size; i++ { - cost_dist[i] = float32(fastLog2(uint(20 + uint32(i)))) - } - - self.min_cost_cmd_ = float32(fastLog2(11)) -} - -func zopfliCostModelGetCommandCost(self *zopfliCostModel, cmdcode uint16) float32 { - return self.cost_cmd_[cmdcode] -} - -func zopfliCostModelGetDistanceCost(self *zopfliCostModel, distcode uint) float32 { - return self.cost_dist_[distcode] -} - -func zopfliCostModelGetLiteralCosts(self *zopfliCostModel, from uint, to uint) float32 { - return self.literal_costs_[to] - self.literal_costs_[from] -} - -func zopfliCostModelGetMinCostCmd(self *zopfliCostModel) float32 { - return self.min_cost_cmd_ -} - -/* REQUIRES: len >= 2, start_pos <= pos */ -/* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */ -/* Maintains the "ZopfliNode array invariant". */ -func updateZopfliNode(nodes []zopfliNode, pos uint, start_pos uint, len uint, len_code uint, dist uint, short_code uint, cost float32) { - var next *zopfliNode = &nodes[pos+len] - next.length = uint32(len | (len+9-len_code)<<25) - next.distance = uint32(dist) - next.dcode_insert_length = uint32(short_code<<27 | (pos - start_pos)) - next.u.cost = cost -} - -type posData struct { - pos uint - distance_cache [4]int - costdiff float32 - cost float32 -} - -/* Maintains the smallest 8 cost difference together with their positions */ -type startPosQueue struct { - q_ [8]posData - idx_ uint -} - -func initStartPosQueue(self *startPosQueue) { - self.idx_ = 0 -} - -func startPosQueueSize(self *startPosQueue) uint { - return brotli_min_size_t(self.idx_, 8) -} - -func startPosQueuePush(self *startPosQueue, posdata *posData) { - var offset uint = ^(self.idx_) & 7 - self.idx_++ - var len uint = startPosQueueSize(self) - var i uint - var q []posData = self.q_[:] - q[offset] = *posdata - - /* Restore the sorted order. In the list of |len| items at most |len - 1| - adjacent element comparisons / swaps are required. */ - for i = 1; i < len; i++ { - if q[offset&7].costdiff > q[(offset+1)&7].costdiff { - var tmp posData = q[offset&7] - q[offset&7] = q[(offset+1)&7] - q[(offset+1)&7] = tmp - } - - offset++ - } -} - -func startPosQueueAt(self *startPosQueue, k uint) *posData { - return &self.q_[(k-self.idx_)&7] -} - -/* Returns the minimum possible copy length that can improve the cost of any */ -/* future position. */ -func computeMinimumCopyLength(start_cost float32, nodes []zopfliNode, num_bytes uint, pos uint) uint { - var min_cost float32 = start_cost - var len uint = 2 - var next_len_bucket uint = 4 - /* Compute the minimum possible cost of reaching any future position. 
*/ - - var next_len_offset uint = 10 - for pos+len <= num_bytes && nodes[pos+len].u.cost <= min_cost { - /* We already reached (pos + len) with no more cost than the minimum - possible cost of reaching anything from this pos, so there is no point in - looking for lengths <= len. */ - len++ - - if len == next_len_offset { - /* We reached the next copy length code bucket, so we add one more - extra bit to the minimum cost. */ - min_cost += 1.0 - - next_len_offset += next_len_bucket - next_len_bucket *= 2 - } - } - - return uint(len) -} - -/* REQUIRES: nodes[pos].cost < kInfinity - REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */ -func computeDistanceShortcut(block_start uint, pos uint, max_backward_limit uint, gap uint, nodes []zopfliNode) uint32 { - var clen uint = uint(zopfliNodeCopyLength(&nodes[pos])) - var ilen uint = uint(nodes[pos].dcode_insert_length & 0x7FFFFFF) - var dist uint = uint(zopfliNodeCopyDistance(&nodes[pos])) - - /* Since |block_start + pos| is the end position of the command, the copy part - starts from |block_start + pos - clen|. Distances that are greater than - this or greater than |max_backward_limit| + |gap| are static dictionary - references, and do not update the last distances. - Also distance code 0 (last distance) does not update the last distances. */ - if pos == 0 { - return 0 - } else if dist+clen <= block_start+pos+gap && dist <= max_backward_limit+gap && zopfliNodeDistanceCode(&nodes[pos]) > 0 { - return uint32(pos) - } else { - return nodes[pos-clen-ilen].u.shortcut - } -} - -/* Fills in dist_cache[0..3] with the last four distances (as defined by - Section 4. of the Spec) that would be used at (block_start + pos) if we - used the shortest path of commands from block_start, computed from - nodes[0..pos]. The last four distances at block_start are in - starting_dist_cache[0..3]. - REQUIRES: nodes[pos].cost < kInfinity - REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */ -func computeDistanceCache(pos uint, starting_dist_cache []int, nodes []zopfliNode, dist_cache []int) { - var idx int = 0 - var p uint = uint(nodes[pos].u.shortcut) - for idx < 4 && p > 0 { - var ilen uint = uint(nodes[p].dcode_insert_length & 0x7FFFFFF) - var clen uint = uint(zopfliNodeCopyLength(&nodes[p])) - var dist uint = uint(zopfliNodeCopyDistance(&nodes[p])) - dist_cache[idx] = int(dist) - idx++ - - /* Because of prerequisite, p >= clen + ilen >= 2. */ - p = uint(nodes[p-clen-ilen].u.shortcut) - } - - for ; idx < 4; idx++ { - dist_cache[idx] = starting_dist_cache[0] - starting_dist_cache = starting_dist_cache[1:] - } -} - -/* Maintains "ZopfliNode array invariant" and pushes node to the queue, if it - is eligible. */ -func evaluateNode(block_start uint, pos uint, max_backward_limit uint, gap uint, starting_dist_cache []int, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) { - /* Save cost, because ComputeDistanceCache invalidates it. */ - var node_cost float32 = nodes[pos].u.cost - nodes[pos].u.shortcut = computeDistanceShortcut(block_start, pos, max_backward_limit, gap, nodes) - if node_cost <= zopfliCostModelGetLiteralCosts(model, 0, pos) { - var posdata posData - posdata.pos = pos - posdata.cost = node_cost - posdata.costdiff = node_cost - zopfliCostModelGetLiteralCosts(model, 0, pos) - computeDistanceCache(pos, starting_dist_cache, nodes, posdata.distance_cache[:]) - startPosQueuePush(queue, &posdata) - } -} - -/* Returns longest copy length. 
*/ -func updateNodes(num_bytes uint, block_start uint, pos uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, max_backward_limit uint, starting_dist_cache []int, num_matches uint, matches []backwardMatch, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) uint { - var cur_ix uint = block_start + pos - var cur_ix_masked uint = cur_ix & ringbuffer_mask - var max_distance uint = brotli_min_size_t(cur_ix, max_backward_limit) - var max_len uint = num_bytes - pos - var max_zopfli_len uint = maxZopfliLen(params) - var max_iters uint = maxZopfliCandidates(params) - var min_len uint - var result uint = 0 - var k uint - var gap uint = 0 - - evaluateNode(block_start, pos, max_backward_limit, gap, starting_dist_cache, model, queue, nodes) - { - var posdata *posData = startPosQueueAt(queue, 0) - var min_cost float32 = (posdata.cost + zopfliCostModelGetMinCostCmd(model) + zopfliCostModelGetLiteralCosts(model, posdata.pos, pos)) - min_len = computeMinimumCopyLength(min_cost, nodes, num_bytes, pos) - } - - /* Go over the command starting positions in order of increasing cost - difference. */ - for k = 0; k < max_iters && k < startPosQueueSize(queue); k++ { - var posdata *posData = startPosQueueAt(queue, k) - var start uint = posdata.pos - var inscode uint16 = getInsertLengthCode(pos - start) - var start_costdiff float32 = posdata.costdiff - var base_cost float32 = start_costdiff + float32(getInsertExtra(inscode)) + zopfliCostModelGetLiteralCosts(model, 0, pos) - var best_len uint = min_len - 1 - var j uint = 0 - /* Look for last distance matches using the distance cache from this - starting position. */ - for ; j < numDistanceShortCodes && best_len < max_len; j++ { - var idx uint = uint(kDistanceCacheIndex[j]) - var backward uint = uint(posdata.distance_cache[idx] + kDistanceCacheOffset[j]) - var prev_ix uint = cur_ix - backward - var len uint = 0 - var continuation byte = ringbuffer[cur_ix_masked+best_len] - if cur_ix_masked+best_len > ringbuffer_mask { - break - } - - if backward > max_distance+gap { - /* Word dictionary -> ignore. */ - continue - } - - if backward <= max_distance { - /* Regular backward reference. */ - if prev_ix >= cur_ix { - continue - } - - prev_ix &= ringbuffer_mask - if prev_ix+best_len > ringbuffer_mask || continuation != ringbuffer[prev_ix+best_len] { - continue - } - - len = findMatchLengthWithLimit(ringbuffer[prev_ix:], ringbuffer[cur_ix_masked:], max_len) - } else { - continue - } - { - var dist_cost float32 = base_cost + zopfliCostModelGetDistanceCost(model, j) - var l uint - for l = best_len + 1; l <= len; l++ { - var copycode uint16 = getCopyLengthCode(l) - var cmdcode uint16 = combineLengthCodes(inscode, copycode, j == 0) - var tmp float32 - if cmdcode < 128 { - tmp = base_cost - } else { - tmp = dist_cost - } - var cost float32 = tmp + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode) - if cost < nodes[pos+l].u.cost { - updateZopfliNode(nodes, pos, start, l, l, backward, j+1, cost) - result = brotli_max_size_t(result, l) - } - - best_len = l - } - } - } - - /* At higher iterations look only for new last distance matches, since - looking only for new command start positions with the same distances - does not help much. */ - if k >= 2 { - continue - } - { - /* Loop through all possible copy lengths at this position. 
*/ - var len uint = min_len - for j = 0; j < num_matches; j++ { - var match backwardMatch = matches[j] - var dist uint = uint(match.distance) - var is_dictionary_match bool = (dist > max_distance+gap) - var dist_code uint = dist + numDistanceShortCodes - 1 - var dist_symbol uint16 - var distextra uint32 - var distnumextra uint32 - var dist_cost float32 - var max_match_len uint - /* We already tried all possible last distance matches, so we can use - normal distance code here. */ - prefixEncodeCopyDistance(dist_code, uint(params.dist.num_direct_distance_codes), uint(params.dist.distance_postfix_bits), &dist_symbol, &distextra) - - distnumextra = uint32(dist_symbol) >> 10 - dist_cost = base_cost + float32(distnumextra) + zopfliCostModelGetDistanceCost(model, uint(dist_symbol)&0x3FF) - - /* Try all copy lengths up until the maximum copy length corresponding - to this distance. If the distance refers to the static dictionary, or - the maximum length is long enough, try only one maximum length. */ - max_match_len = backwardMatchLength(&match) - - if len < max_match_len && (is_dictionary_match || max_match_len > max_zopfli_len) { - len = max_match_len - } - - for ; len <= max_match_len; len++ { - var len_code uint - if is_dictionary_match { - len_code = backwardMatchLengthCode(&match) - } else { - len_code = len - } - var copycode uint16 = getCopyLengthCode(len_code) - var cmdcode uint16 = combineLengthCodes(inscode, copycode, false) - var cost float32 = dist_cost + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode) - if cost < nodes[pos+len].u.cost { - updateZopfliNode(nodes, pos, start, uint(len), len_code, dist, 0, cost) - result = brotli_max_size_t(result, uint(len)) - } - } - } - } - } - - return result -} - -func computeShortestPathFromNodes(num_bytes uint, nodes []zopfliNode) uint { - var index uint = num_bytes - var num_commands uint = 0 - for nodes[index].dcode_insert_length&0x7FFFFFF == 0 && nodes[index].length == 1 { - index-- - } - nodes[index].u.next = math.MaxUint32 - for index != 0 { - var len uint = uint(zopfliNodeCommandLength(&nodes[index])) - index -= uint(len) - nodes[index].u.next = uint32(len) - num_commands++ - } - - return num_commands -} - -/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */ -func zopfliCreateCommands(num_bytes uint, block_start uint, nodes []zopfliNode, dist_cache []int, last_insert_len *uint, params *encoderParams, commands []command, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var pos uint = 0 - var offset uint32 = nodes[0].u.next - var i uint - var gap uint = 0 - for i = 0; offset != math.MaxUint32; i++ { - var next *zopfliNode = &nodes[uint32(pos)+offset] - var copy_length uint = uint(zopfliNodeCopyLength(next)) - var insert_length uint = uint(next.dcode_insert_length & 0x7FFFFFF) - pos += insert_length - offset = next.u.next - if i == 0 { - insert_length += *last_insert_len - *last_insert_len = 0 - } - { - var distance uint = uint(zopfliNodeCopyDistance(next)) - var len_code uint = uint(zopfliNodeLengthCode(next)) - var max_distance uint = brotli_min_size_t(block_start+pos, max_backward_limit) - var is_dictionary bool = (distance > max_distance+gap) - var dist_code uint = uint(zopfliNodeDistanceCode(next)) - initCommand(&commands[i], &params.dist, insert_length, copy_length, int(len_code)-int(copy_length), dist_code) - - if !is_dictionary && dist_code > 0 { - dist_cache[3] = dist_cache[2] - dist_cache[2] = dist_cache[1] - dist_cache[1] = dist_cache[0] - dist_cache[0] =
int(distance) - } - } - - *num_literals += insert_length - pos += copy_length - } - - *last_insert_len += num_bytes - pos -} - -func zopfliIterate(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, gap uint, dist_cache []int, model *zopfliCostModel, num_matches []uint32, matches []backwardMatch, nodes []zopfliNode) uint { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var max_zopfli_len uint = maxZopfliLen(params) - var queue startPosQueue - var cur_match_pos uint = 0 - var i uint - nodes[0].length = 0 - nodes[0].u.cost = 0 - initStartPosQueue(&queue) - for i = 0; i+3 < num_bytes; i++ { - var skip uint = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit, dist_cache, uint(num_matches[i]), matches[cur_match_pos:], model, &queue, nodes) - if skip < longCopyQuickStep { - skip = 0 - } - cur_match_pos += uint(num_matches[i]) - if num_matches[i] == 1 && backwardMatchLength(&matches[cur_match_pos-1]) > max_zopfli_len { - skip = brotli_max_size_t(backwardMatchLength(&matches[cur_match_pos-1]), skip) - } - - if skip > 1 { - skip-- - for skip != 0 { - i++ - if i+3 >= num_bytes { - break - } - evaluateNode(position, i, max_backward_limit, gap, dist_cache, model, &queue, nodes) - cur_match_pos += uint(num_matches[i]) - skip-- - } - } - } - - return computeShortestPathFromNodes(num_bytes, nodes) -} - -/* Computes the shortest path of commands from position to at most - position + num_bytes. - - On return, path->size() is the number of commands found and path[i] is the - length of the i-th command (copy length plus insert length). - Note that the sum of the lengths of all commands can be less than num_bytes. - - On return, the nodes[0..num_bytes] array will have the following - "ZopfliNode array invariant": - For each i in [1..num_bytes], if nodes[i].cost < kInfinity, then - (1) nodes[i].copy_length() >= 2 - (2) nodes[i].command_length() <= i and - (3) nodes[i - nodes[i].command_length()].cost < kInfinity - - REQUIRES: nodes != nil and len(nodes) >= num_bytes + 1 */ -func zopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, dist_cache []int, hasher *h10, nodes []zopfliNode) uint { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var max_zopfli_len uint = maxZopfliLen(params) - var model zopfliCostModel - var queue startPosQueue - var matches [2 * (maxNumMatchesH10 + 64)]backwardMatch - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var i uint - var gap uint = 0 - var lz_matches_offset uint = 0 - nodes[0].length = 0 - nodes[0].u.cost = 0 - initZopfliCostModel(&model, &params.dist, num_bytes) - zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask) - initStartPosQueue(&queue) - for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ { - var pos uint = position + i - var max_distance uint = brotli_min_size_t(pos, max_backward_limit) - var skip uint - var num_matches uint - num_matches = findAllMatchesH10(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, pos, num_bytes-i, max_distance, gap, params, matches[lz_matches_offset:]) - if num_matches > 0 && backwardMatchLength(&matches[num_matches-1]) > max_zopfli_len { - matches[0] = matches[num_matches-1] - num_matches = 1 - } - - skip = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit,
dist_cache, num_matches, matches[:], &model, &queue, nodes) - if skip < longCopyQuickStep { - skip = 0 - } - if num_matches == 1 && backwardMatchLength(&matches[0]) > max_zopfli_len { - skip = brotli_max_size_t(backwardMatchLength(&matches[0]), skip) - } - - if skip > 1 { - /* Add the tail of the copy to the hasher. */ - hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+skip, store_end)) - - skip-- - for skip != 0 { - i++ - if i+hasher.HashTypeLength()-1 >= num_bytes { - break - } - evaluateNode(position, i, max_backward_limit, gap, dist_cache, &model, &queue, nodes) - skip-- - } - } - } - - cleanupZopfliCostModel(&model) - return computeShortestPathFromNodes(num_bytes, nodes) -} - -func createZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher *h10, dist_cache []int, last_insert_len *uint, commands []command, num_commands *uint, num_literals *uint) { - var nodes []zopfliNode - nodes = make([]zopfliNode, (num_bytes + 1)) - initZopfliNodes(nodes, num_bytes+1) - *num_commands += zopfliComputeShortestPath(num_bytes, position, ringbuffer, ringbuffer_mask, params, dist_cache, hasher, nodes) - zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals) - nodes = nil -} - -func createHqZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands []command, num_commands *uint, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var num_matches []uint32 = make([]uint32, num_bytes) - var matches_size uint = 4 * num_bytes - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var cur_match_pos uint = 0 - var i uint - var orig_num_literals uint - var orig_last_insert_len uint - var orig_dist_cache [4]int - var orig_num_commands uint - var model zopfliCostModel - var nodes []zopfliNode - var matches []backwardMatch = make([]backwardMatch, matches_size) - var gap uint = 0 - var shadow_matches uint = 0 - var new_array []backwardMatch - for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ { - var pos uint = position + i - var max_distance uint = brotli_min_size_t(pos, max_backward_limit) - var max_length uint = num_bytes - i - var num_found_matches uint - var cur_match_end uint - var j uint - - /* Ensure that we have enough free slots. 
*/ - if matches_size < cur_match_pos+maxNumMatchesH10+shadow_matches { - var new_size uint = matches_size - if new_size == 0 { - new_size = cur_match_pos + maxNumMatchesH10 + shadow_matches - } - - for new_size < cur_match_pos+maxNumMatchesH10+shadow_matches { - new_size *= 2 - } - - new_array = make([]backwardMatch, new_size) - if matches_size != 0 { - copy(new_array, matches[:matches_size]) - } - - matches = new_array - matches_size = new_size - } - - num_found_matches = findAllMatchesH10(hasher.(*h10), &params.dictionary, ringbuffer, ringbuffer_mask, pos, max_length, max_distance, gap, params, matches[cur_match_pos+shadow_matches:]) - cur_match_end = cur_match_pos + num_found_matches - for j = cur_match_pos; j+1 < cur_match_end; j++ { - assert(backwardMatchLength(&matches[j]) <= backwardMatchLength(&matches[j+1])) - } - - num_matches[i] = uint32(num_found_matches) - if num_found_matches > 0 { - var match_len uint = backwardMatchLength(&matches[cur_match_end-1]) - if match_len > maxZopfliLenQuality11 { - var skip uint = match_len - 1 - matches[cur_match_pos] = matches[cur_match_end-1] - cur_match_pos++ - num_matches[i] = 1 - - /* Add the tail of the copy to the hasher. */ - hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+match_len, store_end)) - var pos uint = i - for i := 0; i < int(skip); i++ { - num_matches[pos+1:][i] = 0 - } - i += skip - } else { - cur_match_pos = cur_match_end - } - } - } - - orig_num_literals = *num_literals - orig_last_insert_len = *last_insert_len - copy(orig_dist_cache[:], dist_cache[:4]) - orig_num_commands = *num_commands - nodes = make([]zopfliNode, (num_bytes + 1)) - initZopfliCostModel(&model, &params.dist, num_bytes) - for i = 0; i < 2; i++ { - initZopfliNodes(nodes, num_bytes+1) - if i == 0 { - zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask) - } else { - zopfliCostModelSetFromCommands(&model, position, ringbuffer, ringbuffer_mask, commands, *num_commands-orig_num_commands, orig_last_insert_len) - } - - *num_commands = orig_num_commands - *num_literals = orig_num_literals - *last_insert_len = orig_last_insert_len - copy(dist_cache, orig_dist_cache[:4]) - *num_commands += zopfliIterate(num_bytes, position, ringbuffer, ringbuffer_mask, params, gap, dist_cache, &model, num_matches, matches, nodes) - zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals) - } - - cleanupZopfliCostModel(&model) - nodes = nil - matches = nil - num_matches = nil -} diff --git a/vendor/github.com/andybalholm/brotli/bit_cost.go b/vendor/github.com/andybalholm/brotli/bit_cost.go deleted file mode 100644 index 0005fc15e63..00000000000 --- a/vendor/github.com/andybalholm/brotli/bit_cost.go +++ /dev/null @@ -1,436 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions to estimate the bit cost of Huffman trees.
*/ -func shannonEntropy(population []uint32, size uint, total *uint) float64 { - var sum uint = 0 - var retval float64 = 0 - var population_end []uint32 = population[size:] - var p uint - for -cap(population) < -cap(population_end) { - p = uint(population[0]) - population = population[1:] - sum += p - retval -= float64(p) * fastLog2(p) - } - - if sum != 0 { - retval += float64(sum) * fastLog2(sum) - } - *total = sum - return retval -} - -func bitsEntropy(population []uint32, size uint) float64 { - var sum uint - var retval float64 = shannonEntropy(population, size, &sum) - if retval < float64(sum) { - /* At least one bit per literal is needed. */ - retval = float64(sum) - } - - return retval -} - -const kOneSymbolHistogramCost float64 = 12 -const kTwoSymbolHistogramCost float64 = 20 -const kThreeSymbolHistogramCost float64 = 28 -const kFourSymbolHistogramCost float64 = 37 - -func populationCostLiteral(histogram *histogramLiteral) float64 { - var data_size uint = histogramDataSizeLiteral() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j < 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. */ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. 
*/ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. */ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. */ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. */ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} - -func populationCostCommand(histogram *histogramCommand) float64 { - var data_size uint = histogramDataSizeCommand() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j < 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. */ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. */ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. 
*/ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. */ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. */ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} - -func populationCostDistance(histogram *histogramDistance) float64 { - var data_size uint = histogramDataSizeDistance() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j < 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. */ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. */ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. */ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. 
*/ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. */ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} diff --git a/vendor/github.com/andybalholm/brotli/bit_reader.go b/vendor/github.com/andybalholm/brotli/bit_reader.go deleted file mode 100644 index fba8687c69f..00000000000 --- a/vendor/github.com/andybalholm/brotli/bit_reader.go +++ /dev/null @@ -1,266 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Bit reading helpers */ - -const shortFillBitWindowRead = (8 >> 1) - -var kBitMask = [33]uint32{ - 0x00000000, - 0x00000001, - 0x00000003, - 0x00000007, - 0x0000000F, - 0x0000001F, - 0x0000003F, - 0x0000007F, - 0x000000FF, - 0x000001FF, - 0x000003FF, - 0x000007FF, - 0x00000FFF, - 0x00001FFF, - 0x00003FFF, - 0x00007FFF, - 0x0000FFFF, - 0x0001FFFF, - 0x0003FFFF, - 0x0007FFFF, - 0x000FFFFF, - 0x001FFFFF, - 0x003FFFFF, - 0x007FFFFF, - 0x00FFFFFF, - 0x01FFFFFF, - 0x03FFFFFF, - 0x07FFFFFF, - 0x0FFFFFFF, - 0x1FFFFFFF, - 0x3FFFFFFF, - 0x7FFFFFFF, - 0xFFFFFFFF, -} - -func bitMask(n uint32) uint32 { - return kBitMask[n] -} - -type bitReader struct { - val_ uint64 - bit_pos_ uint32 - input []byte - input_len uint - byte_pos uint -} - -type bitReaderState struct { - val_ uint64 - bit_pos_ uint32 - input []byte - input_len uint - byte_pos uint -} - -/* Initializes the BrotliBitReader fields. */ - -/* Ensures that accumulator is not empty. - May consume up to sizeof(brotli_reg_t) - 1 bytes of input. - Returns false if data is required but there is no input available. - For BROTLI_ALIGNED_READ this function also prepares bit reader for aligned - reading. */ -func bitReaderSaveState(from *bitReader, to *bitReaderState) { - to.val_ = from.val_ - to.bit_pos_ = from.bit_pos_ - to.input = from.input - to.input_len = from.input_len - to.byte_pos = from.byte_pos -} - -func bitReaderRestoreState(to *bitReader, from *bitReaderState) { - to.val_ = from.val_ - to.bit_pos_ = from.bit_pos_ - to.input = from.input - to.input_len = from.input_len - to.byte_pos = from.byte_pos -} - -func getAvailableBits(br *bitReader) uint32 { - return 64 - br.bit_pos_ -} - -/* Returns amount of unread bytes the bit reader still has buffered from the - BrotliInput, including whole bytes in br->val_. */ -func getRemainingBytes(br *bitReader) uint { - return uint(uint32(br.input_len-br.byte_pos) + (getAvailableBits(br) >> 3)) -} - -/* Checks if there is at least |num| bytes left in the input ring-buffer - (excluding the bits remaining in br->val_). */ -func checkInputAmount(br *bitReader, num uint) bool { - return br.input_len-br.byte_pos >= num -} - -/* Guarantees that there are at least |n_bits| + 1 bits in accumulator. - Precondition: accumulator contains at least 1 bit. - |n_bits| should be in the range [1..24] for regular build. For portable - non-64-bit little-endian build only 16 bits are safe to request. */ -func fillBitWindow(br *bitReader, n_bits uint32) { - if br.bit_pos_ >= 32 { - br.val_ >>= 32 - br.bit_pos_ ^= 32 /* here same as -= 32 because of the if condition */ - br.val_ |= (uint64(binary.LittleEndian.Uint32(br.input[br.byte_pos:]))) << 32 - br.byte_pos += 4 - } -} - -/* Mostly like BrotliFillBitWindow, but guarantees only 16 bits and reads no - more than BROTLI_SHORT_FILL_BIT_WINDOW_READ bytes of input. 
*/ -func fillBitWindow16(br *bitReader) { - fillBitWindow(br, 17) -} - -/* Tries to pull one byte of input to accumulator. - Returns false if there is no input available. */ -func pullByte(br *bitReader) bool { - if br.byte_pos == br.input_len { - return false - } - - br.val_ >>= 8 - br.val_ |= (uint64(br.input[br.byte_pos])) << 56 - br.bit_pos_ -= 8 - br.byte_pos++ - return true -} - -/* Returns currently available bits. - The number of valid bits could be calculated by BrotliGetAvailableBits. */ -func getBitsUnmasked(br *bitReader) uint64 { - return br.val_ >> br.bit_pos_ -} - -/* Like BrotliGetBits, but does not mask the result. - The result contains at least 16 valid bits. */ -func get16BitsUnmasked(br *bitReader) uint32 { - fillBitWindow(br, 16) - return uint32(getBitsUnmasked(br)) -} - -/* Returns the specified number of bits from |br| without advancing bit - position. */ -func getBits(br *bitReader, n_bits uint32) uint32 { - fillBitWindow(br, n_bits) - return uint32(getBitsUnmasked(br)) & bitMask(n_bits) -} - -/* Tries to peek the specified amount of bits. Returns false, if there - is not enough input. */ -func safeGetBits(br *bitReader, n_bits uint32, val *uint32) bool { - for getAvailableBits(br) < n_bits { - if !pullByte(br) { - return false - } - } - - *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits) - return true -} - -/* Advances the bit pos by |n_bits|. */ -func dropBits(br *bitReader, n_bits uint32) { - br.bit_pos_ += n_bits -} - -func bitReaderUnload(br *bitReader) { - var unused_bytes uint32 = getAvailableBits(br) >> 3 - var unused_bits uint32 = unused_bytes << 3 - br.byte_pos -= uint(unused_bytes) - if unused_bits == 64 { - br.val_ = 0 - } else { - br.val_ <<= unused_bits - } - - br.bit_pos_ += unused_bits -} - -/* Reads the specified number of bits from |br| and advances the bit pos. - Precondition: accumulator MUST contain at least |n_bits|. */ -func takeBits(br *bitReader, n_bits uint32, val *uint32) { - *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits) - dropBits(br, n_bits) -} - -/* Reads the specified number of bits from |br| and advances the bit pos. - Assumes that there is enough input to perform BrotliFillBitWindow. */ -func readBits(br *bitReader, n_bits uint32) uint32 { - var val uint32 - fillBitWindow(br, n_bits) - takeBits(br, n_bits, &val) - return val -} - -/* Tries to read the specified amount of bits. Returns false, if there - is not enough input. |n_bits| MUST be positive. */ -func safeReadBits(br *bitReader, n_bits uint32, val *uint32) bool { - for getAvailableBits(br) < n_bits { - if !pullByte(br) { - return false - } - } - - takeBits(br, n_bits, val) - return true -} - -/* Advances the bit reader position to the next byte boundary and verifies - that any skipped bits are set to zero. */ -func bitReaderJumpToByteBoundary(br *bitReader) bool { - var pad_bits_count uint32 = getAvailableBits(br) & 0x7 - var pad_bits uint32 = 0 - if pad_bits_count != 0 { - takeBits(br, pad_bits_count, &pad_bits) - } - - return pad_bits == 0 -} - -/* Copies remaining input bytes stored in the bit reader to the output. Value - |num| may not be larger than BrotliGetRemainingBytes. The bit reader must be - warmed up again after this. 
*/ -func copyBytes(dest []byte, br *bitReader, num uint) { - for getAvailableBits(br) >= 8 && num > 0 { - dest[0] = byte(getBitsUnmasked(br)) - dropBits(br, 8) - dest = dest[1:] - num-- - } - - copy(dest, br.input[br.byte_pos:][:num]) - br.byte_pos += num -} - -func initBitReader(br *bitReader) { - br.val_ = 0 - br.bit_pos_ = 64 -} - -func warmupBitReader(br *bitReader) bool { - /* Fixing alignment after unaligned BrotliFillWindow would result accumulator - overflow. If unalignment is caused by BrotliSafeReadBits, then there is - enough space in accumulator to fix alignment. */ - if getAvailableBits(br) == 0 { - if !pullByte(br) { - return false - } - } - - return true -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter.go b/vendor/github.com/andybalholm/brotli/block_splitter.go deleted file mode 100644 index 2ccff45a3ea..00000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter.go +++ /dev/null @@ -1,153 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Block split point selection utilities. */ - -type blockSplit struct { - num_types uint - num_blocks uint - types []byte - lengths []uint32 - types_alloc_size uint - lengths_alloc_size uint -} - -const ( - kMaxLiteralHistograms uint = 100 - kMaxCommandHistograms uint = 50 - kLiteralBlockSwitchCost float64 = 28.1 - kCommandBlockSwitchCost float64 = 13.5 - kDistanceBlockSwitchCost float64 = 14.6 - kLiteralStrideLength uint = 70 - kCommandStrideLength uint = 40 - kSymbolsPerLiteralHistogram uint = 544 - kSymbolsPerCommandHistogram uint = 530 - kSymbolsPerDistanceHistogram uint = 544 - kMinLengthForBlockSplitting uint = 128 - kIterMulForRefining uint = 2 - kMinItersForRefining uint = 100 -) - -func countLiterals(cmds []command, num_commands uint) uint { - var total_length uint = 0 - /* Count how many we have. */ - - var i uint - for i = 0; i < num_commands; i++ { - total_length += uint(cmds[i].insert_len_) - } - - return total_length -} - -func copyLiteralsToByteArray(cmds []command, num_commands uint, data []byte, offset uint, mask uint, literals []byte) { - var pos uint = 0 - var from_pos uint = offset & mask - var i uint - for i = 0; i < num_commands; i++ { - var insert_len uint = uint(cmds[i].insert_len_) - if from_pos+insert_len > mask { - var head_size uint = mask + 1 - from_pos - copy(literals[pos:], data[from_pos:][:head_size]) - from_pos = 0 - pos += head_size - insert_len -= head_size - } - - if insert_len > 0 { - copy(literals[pos:], data[from_pos:][:insert_len]) - pos += insert_len - } - - from_pos = uint((uint32(from_pos+insert_len) + commandCopyLen(&cmds[i])) & uint32(mask)) - } -} - -func myRand(seed *uint32) uint32 { - /* Initial seed should be 7. In this case, loop length is (1 << 29). 
*/ - *seed *= 16807 - - return *seed -} - -func bitCost(count uint) float64 { - if count == 0 { - return -2.0 - } else { - return fastLog2(count) - } -} - -const histogramsPerBatch = 64 - -const clustersPerBatch = 16 - -func initBlockSplit(self *blockSplit) { - self.num_types = 0 - self.num_blocks = 0 - self.types = nil - self.lengths = nil - self.types_alloc_size = 0 - self.lengths_alloc_size = 0 -} - -func destroyBlockSplit(self *blockSplit) { - self.types = nil - self.lengths = nil -} - -func splitBlock(cmds []command, num_commands uint, data []byte, pos uint, mask uint, params *encoderParams, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit) { - { - var literals_count uint = countLiterals(cmds, num_commands) - var literals []byte = make([]byte, literals_count) - - /* Create a continuous array of literals. */ - copyLiteralsToByteArray(cmds, num_commands, data, pos, mask, literals) - - /* Create the block split on the array of literals. - Literal histograms have alphabet size 256. */ - splitByteVectorLiteral(literals, literals_count, kSymbolsPerLiteralHistogram, kMaxLiteralHistograms, kLiteralStrideLength, kLiteralBlockSwitchCost, params, literal_split) - - literals = nil - } - { - var insert_and_copy_codes []uint16 = make([]uint16, num_commands) - /* Compute prefix codes for commands. */ - - var i uint - for i = 0; i < num_commands; i++ { - insert_and_copy_codes[i] = cmds[i].cmd_prefix_ - } - - /* Create the block split on the array of command prefixes. */ - splitByteVectorCommand(insert_and_copy_codes, num_commands, kSymbolsPerCommandHistogram, kMaxCommandHistograms, kCommandStrideLength, kCommandBlockSwitchCost, params, insert_and_copy_split) - - /* TODO: reuse for distances? */ - - insert_and_copy_codes = nil - } - { - var distance_prefixes []uint16 = make([]uint16, num_commands) - var j uint = 0 - /* Create a continuous array of distance prefixes. */ - - var i uint - for i = 0; i < num_commands; i++ { - var cmd *command = &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - distance_prefixes[j] = cmd.dist_prefix_ & 0x3FF - j++ - } - } - - /* Create the block split on the array of distance prefixes. */ - splitByteVectorDistance(distance_prefixes, j, kSymbolsPerDistanceHistogram, kMaxCommandHistograms, kCommandStrideLength, kDistanceBlockSwitchCost, params, dist_split) - - distance_prefixes = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_command.go b/vendor/github.com/andybalholm/brotli/block_splitter_command.go deleted file mode 100644 index e505fe13e73..00000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_command.go +++ /dev/null @@ -1,433 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsCommand(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorCommand(&histograms[i], data[pos:], stride) - } -} - -func randomSampleCommand(seed *uint32, data []uint16, length uint, stride uint, sample *histogramCommand) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorCommand(sample, data[pos:], stride) -} - -func refineEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) { - var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramCommand - histogramClearCommand(&sample) - randomSampleCommand(&seed, data, length, stride, &sample) - histogramAddHistogramCommand(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksCommand(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramCommand, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeCommand() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. 
*/ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. */ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsCommand_kInvalidId uint16 = 256 - -func remapBlockIdsCommand(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsCommand_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsCommand_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsCommand(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramCommand) { - var i uint - clearHistogramsCommand(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddCommand(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksCommand_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksCommand(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramCommand = make([]histogramCommand, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramCommand = make([]histogramCommand, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - 
block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearCommand(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddCommand(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostCommand(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineCommand(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - _new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramCommand - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramCommand, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineCommand(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksCommand_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramCommand - var j uint - var best_out uint32 - var best_bits float64 - histogramClearCommand(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - histogramAddCommand(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceCommand(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceCommand(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksCommand_kInvalidIndex { - 
new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorCommand(data []uint16, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - var data_size uint = histogramDataSizeCommand() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramCommand - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramCommand, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. */ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksCommand(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsCommand(block_ids, length, new_id, num_histograms) - buildBlockHistogramsCommand(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksCommand(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_distance.go b/vendor/github.com/andybalholm/brotli/block_splitter_distance.go deleted file mode 100644 index 953530d518e..00000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_distance.go +++ /dev/null @@ -1,433 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesDistance(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramDistance) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsDistance(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorDistance(&histograms[i], data[pos:], stride) - } -} - -func randomSampleDistance(seed *uint32, data []uint16, length uint, stride uint, sample *histogramDistance) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorDistance(sample, data[pos:], stride) -} - -func refineEntropyCodesDistance(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramDistance) { - var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramDistance - histogramClearDistance(&sample) - randomSampleDistance(&seed, data, length, stride, &sample) - histogramAddHistogramDistance(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksDistance(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramDistance, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeDistance() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. 
*/ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. */ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsDistance_kInvalidId uint16 = 256 - -func remapBlockIdsDistance(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsDistance_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsDistance_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsDistance(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramDistance) { - var i uint - clearHistogramsDistance(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddDistance(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksDistance_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksDistance(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramDistance = make([]histogramDistance, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramDistance = make([]histogramDistance, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - 
block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearDistance(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddDistance(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostDistance(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineDistance(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - _new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramDistance - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramDistance, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineDistance(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksDistance_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramDistance - var j uint - var best_out uint32 - var best_bits float64 - histogramClearDistance(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - histogramAddDistance(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceDistance(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceDistance(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksDistance_kInvalidIndex { 
- new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorDistance(data []uint16, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - var data_size uint = histogramDataSizeDistance() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramDistance - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramDistance, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. */ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksDistance(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsDistance(block_ids, length, new_id, num_histograms) - buildBlockHistogramsDistance(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksDistance(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_literal.go b/vendor/github.com/andybalholm/brotli/block_splitter_literal.go deleted file mode 100644 index 1c895cf3889..00000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_literal.go +++ /dev/null @@ -1,433 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesLiteral(data []byte, length uint, stride uint, num_histograms uint, histograms []histogramLiteral) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsLiteral(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorLiteral(&histograms[i], data[pos:], stride) - } -} - -func randomSampleLiteral(seed *uint32, data []byte, length uint, stride uint, sample *histogramLiteral) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorLiteral(sample, data[pos:], stride) -} - -func refineEntropyCodesLiteral(data []byte, length uint, stride uint, num_histograms uint, histograms []histogramLiteral) { - var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramLiteral - histogramClearLiteral(&sample) - randomSampleLiteral(&seed, data, length, stride, &sample) - histogramAddHistogramLiteral(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksLiteral(data []byte, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramLiteral, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeLiteral() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. 
*/ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. */ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsLiteral_kInvalidId uint16 = 256 - -func remapBlockIdsLiteral(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsLiteral_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsLiteral_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsLiteral(data []byte, length uint, block_ids []byte, num_histograms uint, histograms []histogramLiteral) { - var i uint - clearHistogramsLiteral(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddLiteral(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksLiteral_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksLiteral(data []byte, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramLiteral = make([]histogramLiteral, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramLiteral = make([]histogramLiteral, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - 
block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearLiteral(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddLiteral(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostLiteral(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineLiteral(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - _new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramLiteral - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramLiteral, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineLiteral(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksLiteral_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramLiteral - var j uint - var best_out uint32 - var best_bits float64 - histogramClearLiteral(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - histogramAddLiteral(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceLiteral(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceLiteral(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksLiteral_kInvalidIndex { - 
new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorLiteral(data []byte, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - var data_size uint = histogramDataSizeLiteral() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramLiteral - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramLiteral, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. 
*/ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksLiteral(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsLiteral(block_ids, length, new_id, num_histograms) - buildBlockHistogramsLiteral(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksLiteral(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go b/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go deleted file mode 100644 index 395f6049043..00000000000 --- a/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go +++ /dev/null @@ -1,1265 +0,0 @@ -package brotli - -import "math" - -const maxHuffmanTreeSize = (2*numCommandSymbols + 1) - -/* The maximum size of Huffman dictionary for distances assuming that - NPOSTFIX = 0 and NDIRECT = 0. */ -const maxSimpleDistanceAlphabetSize = 140 - -/* Represents the range of values belonging to a prefix code: - [offset, offset + 2^nbits) */ -type prefixCodeRange struct { - offset uint32 - nbits uint32 -} - -var kBlockLengthPrefixCode = [numBlockLenSymbols]prefixCodeRange{ - prefixCodeRange{1, 2}, - prefixCodeRange{5, 2}, - prefixCodeRange{9, 2}, - prefixCodeRange{13, 2}, - prefixCodeRange{17, 3}, - prefixCodeRange{25, 3}, - prefixCodeRange{33, 3}, - prefixCodeRange{41, 3}, - prefixCodeRange{49, 4}, - prefixCodeRange{65, 4}, - prefixCodeRange{81, 4}, - prefixCodeRange{97, 4}, - prefixCodeRange{113, 5}, - prefixCodeRange{145, 5}, - prefixCodeRange{177, 5}, - prefixCodeRange{209, 5}, - prefixCodeRange{241, 6}, - prefixCodeRange{305, 6}, - prefixCodeRange{369, 7}, - prefixCodeRange{497, 8}, - prefixCodeRange{753, 9}, - prefixCodeRange{1265, 10}, - prefixCodeRange{2289, 11}, - prefixCodeRange{4337, 12}, - prefixCodeRange{8433, 13}, - prefixCodeRange{16625, 24}, -} - -func blockLengthPrefixCode(len uint32) uint32 { - var code uint32 - if len >= 177 { - if len >= 753 { - code = 20 - } else { - code = 14 - } - } else if len >= 41 { - code = 7 - } else { - code = 0 - } - for code < (numBlockLenSymbols-1) && len >= kBlockLengthPrefixCode[code+1].offset { - code++ - } - return code -} - -func getBlockLengthPrefixCode(len uint32, code *uint, n_extra *uint32, extra *uint32) { - *code = uint(blockLengthPrefixCode(uint32(len))) - *n_extra = kBlockLengthPrefixCode[*code].nbits - *extra = len - kBlockLengthPrefixCode[*code].offset -} - -type blockTypeCodeCalculator struct { - last_type uint - second_last_type uint -} - -func initBlockTypeCodeCalculator(self *blockTypeCodeCalculator) { - self.last_type = 1 - self.second_last_type = 0 -} - -func nextBlockTypeCode(calculator *blockTypeCodeCalculator, type_ byte) uint { - var type_code uint - if uint(type_) == calculator.last_type+1 { - type_code = 1 - } else if uint(type_) == calculator.second_last_type { - type_code = 0 - } else { - type_code = uint(type_) + 2 - } - calculator.second_last_type = calculator.last_type - calculator.last_type = uint(type_) - return type_code -} - -/* |nibblesbits| represents the 2 bits to encode MNIBBLES (0-3) - REQUIRES: length > 0 - REQUIRES: length <= (1 << 24) */ -func encodeMlen(length uint, bits *uint64, numbits *uint, nibblesbits *uint64) { - var lg uint - if length == 1 { - lg = 1 - } else { - lg = uint(log2FloorNonZero(uint(uint32(length-1)))) + 1 - } - var tmp uint - if lg < 16 { - tmp = 16 - } else { - tmp = (lg + 
3) - } - var mnibbles uint = tmp / 4 - assert(length > 0) - assert(length <= 1<<24) - assert(lg <= 24) - *nibblesbits = uint64(mnibbles) - 4 - *numbits = mnibbles * 4 - *bits = uint64(length) - 1 -} - -func storeCommandExtra(cmd *command, storage_ix *uint, storage []byte) { - var copylen_code uint32 = commandCopyLenCode(cmd) - var inscode uint16 = getInsertLengthCode(uint(cmd.insert_len_)) - var copycode uint16 = getCopyLengthCode(uint(copylen_code)) - var insnumextra uint32 = getInsertExtra(inscode) - var insextraval uint64 = uint64(cmd.insert_len_) - uint64(getInsertBase(inscode)) - var copyextraval uint64 = uint64(copylen_code) - uint64(getCopyBase(copycode)) - var bits uint64 = copyextraval< 0 - REQUIRES: length <= (1 << 24) */ -func storeCompressedMetaBlockHeader(is_final_block bool, length uint, storage_ix *uint, storage []byte) { - var lenbits uint64 - var nlenbits uint - var nibblesbits uint64 - var is_final uint64 - if is_final_block { - is_final = 1 - } else { - is_final = 0 - } - - /* Write ISLAST bit. */ - writeBits(1, is_final, storage_ix, storage) - - /* Write ISEMPTY bit. */ - if is_final_block { - writeBits(1, 0, storage_ix, storage) - } - - encodeMlen(length, &lenbits, &nlenbits, &nibblesbits) - writeBits(2, nibblesbits, storage_ix, storage) - writeBits(nlenbits, lenbits, storage_ix, storage) - - if !is_final_block { - /* Write ISUNCOMPRESSED bit. */ - writeBits(1, 0, storage_ix, storage) - } -} - -/* Stores the uncompressed meta-block header. - REQUIRES: length > 0 - REQUIRES: length <= (1 << 24) */ -func storeUncompressedMetaBlockHeader(length uint, storage_ix *uint, storage []byte) { - var lenbits uint64 - var nlenbits uint - var nibblesbits uint64 - - /* Write ISLAST bit. - Uncompressed block cannot be the last one, so set to 0. */ - writeBits(1, 0, storage_ix, storage) - - encodeMlen(length, &lenbits, &nlenbits, &nibblesbits) - writeBits(2, nibblesbits, storage_ix, storage) - writeBits(nlenbits, lenbits, storage_ix, storage) - - /* Write ISUNCOMPRESSED bit. */ - writeBits(1, 1, storage_ix, storage) -} - -var storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15} - -var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols = [6]byte{0, 7, 3, 2, 1, 15} -var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths = [6]byte{2, 4, 3, 2, 2, 4} - -func storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes int, code_length_bitdepth []byte, storage_ix *uint, storage []byte) { - var skip_some uint = 0 - var codes_to_store uint = codeLengthCodes - /* The bit lengths of the Huffman code over the code length alphabet - are compressed with the following static Huffman code: - Symbol Code - ------ ---- - 0 00 - 1 1110 - 2 110 - 3 01 - 4 10 - 5 1111 */ - - /* Throw away trailing zeros: */ - if num_codes > 1 { - for ; codes_to_store > 0; codes_to_store-- { - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[codes_to_store-1]] != 0 { - break - } - } - } - - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[0]] == 0 && code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[1]] == 0 { - skip_some = 2 /* skips two. */ - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[2]] == 0 { - skip_some = 3 /* skips three. 
*/ - } - } - - writeBits(2, uint64(skip_some), storage_ix, storage) - { - var i uint - for i = skip_some; i < codes_to_store; i++ { - var l uint = uint(code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[i]]) - writeBits(uint(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths[l]), uint64(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols[l]), storage_ix, storage) - } - } -} - -func storeHuffmanTreeToBitMask(huffman_tree_size uint, huffman_tree []byte, huffman_tree_extra_bits []byte, code_length_bitdepth []byte, code_length_bitdepth_symbols []uint16, storage_ix *uint, storage []byte) { - var i uint - for i = 0; i < huffman_tree_size; i++ { - var ix uint = uint(huffman_tree[i]) - writeBits(uint(code_length_bitdepth[ix]), uint64(code_length_bitdepth_symbols[ix]), storage_ix, storage) - - /* Extra bits */ - switch ix { - case repeatPreviousCodeLength: - writeBits(2, uint64(huffman_tree_extra_bits[i]), storage_ix, storage) - - case repeatZeroCodeLength: - writeBits(3, uint64(huffman_tree_extra_bits[i]), storage_ix, storage) - } - } -} - -func storeSimpleHuffmanTree(depths []byte, symbols []uint, num_symbols uint, max_bits uint, storage_ix *uint, storage []byte) { - /* value of 1 indicates a simple Huffman code */ - writeBits(2, 1, storage_ix, storage) - - writeBits(2, uint64(num_symbols)-1, storage_ix, storage) /* NSYM - 1 */ - { - /* Sort */ - var i uint - for i = 0; i < num_symbols; i++ { - var j uint - for j = i + 1; j < num_symbols; j++ { - if depths[symbols[j]] < depths[symbols[i]] { - var tmp uint = symbols[j] - symbols[j] = symbols[i] - symbols[i] = tmp - } - } - } - } - - if num_symbols == 2 { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - } else if num_symbols == 3 { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[2]), storage_ix, storage) - } else { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[2]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[3]), storage_ix, storage) - - /* tree-select */ - var tmp int - if depths[symbols[0]] == 1 { - tmp = 1 - } else { - tmp = 0 - } - writeBits(1, uint64(tmp), storage_ix, storage) - } -} - -/* num = alphabet size - depths = symbol depths */ -func storeHuffmanTree(depths []byte, num uint, tree []huffmanTree, storage_ix *uint, storage []byte) { - var huffman_tree [numCommandSymbols]byte - var huffman_tree_extra_bits [numCommandSymbols]byte - var huffman_tree_size uint = 0 - var code_length_bitdepth = [codeLengthCodes]byte{0} - var code_length_bitdepth_symbols [codeLengthCodes]uint16 - var huffman_tree_histogram = [codeLengthCodes]uint32{0} - var i uint - var num_codes int = 0 - /* Write the Huffman tree into the brotli-representation. - The command alphabet is the largest, so this allocation will fit all - alphabets. */ - - var code uint = 0 - - assert(num <= numCommandSymbols) - - writeHuffmanTree(depths, num, &huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:]) - - /* Calculate the statistics of the Huffman tree in brotli-representation. 
*/ - for i = 0; i < huffman_tree_size; i++ { - huffman_tree_histogram[huffman_tree[i]]++ - } - - for i = 0; i < codeLengthCodes; i++ { - if huffman_tree_histogram[i] != 0 { - if num_codes == 0 { - code = i - num_codes = 1 - } else if num_codes == 1 { - num_codes = 2 - break - } - } - } - - /* Calculate another Huffman tree to use for compressing both the - earlier Huffman tree with. */ - createHuffmanTree(huffman_tree_histogram[:], codeLengthCodes, 5, tree, code_length_bitdepth[:]) - - convertBitDepthsToSymbols(code_length_bitdepth[:], codeLengthCodes, code_length_bitdepth_symbols[:]) - - /* Now, we have all the data, let's start storing it */ - storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes, code_length_bitdepth[:], storage_ix, storage) - - if num_codes == 1 { - code_length_bitdepth[code] = 0 - } - - /* Store the real Huffman tree now. */ - storeHuffmanTreeToBitMask(huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:], code_length_bitdepth[:], code_length_bitdepth_symbols[:], storage_ix, storage) -} - -/* Builds a Huffman tree from histogram[0:length] into depth[0:length] and - bits[0:length] and stores the encoded tree to the bit stream. */ -func buildAndStoreHuffmanTree(histogram []uint32, histogram_length uint, alphabet_size uint, tree []huffmanTree, depth []byte, bits []uint16, storage_ix *uint, storage []byte) { - var count uint = 0 - var s4 = [4]uint{0} - var i uint - var max_bits uint = 0 - for i = 0; i < histogram_length; i++ { - if histogram[i] != 0 { - if count < 4 { - s4[count] = i - } else if count > 4 { - break - } - - count++ - } - } - { - var max_bits_counter uint = alphabet_size - 1 - for max_bits_counter != 0 { - max_bits_counter >>= 1 - max_bits++ - } - } - - if count <= 1 { - writeBits(4, 1, storage_ix, storage) - writeBits(max_bits, uint64(s4[0]), storage_ix, storage) - depth[s4[0]] = 0 - bits[s4[0]] = 0 - return - } - - for i := 0; i < int(histogram_length); i++ { - depth[i] = 0 - } - createHuffmanTree(histogram, histogram_length, 15, tree, depth) - convertBitDepthsToSymbols(depth, histogram_length, bits) - - if count <= 4 { - storeSimpleHuffmanTree(depth, s4[:], count, max_bits, storage_ix, storage) - } else { - storeHuffmanTree(depth, histogram_length, tree, storage_ix, storage) - } -} - -func sortHuffmanTree1(v0 *huffmanTree, v1 *huffmanTree) bool { - return v0.total_count_ < v1.total_count_ -} - -func buildAndStoreHuffmanTreeFast(histogram []uint32, histogram_total uint, max_bits uint, depth []byte, bits []uint16, storage_ix *uint, storage []byte) { - var count uint = 0 - var symbols = [4]uint{0} - var length uint = 0 - var total uint = histogram_total - for total != 0 { - if histogram[length] != 0 { - if count < 4 { - symbols[count] = length - } - - count++ - total -= uint(histogram[length]) - } - - length++ - } - - if count <= 1 { - writeBits(4, 1, storage_ix, storage) - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - depth[symbols[0]] = 0 - bits[symbols[0]] = 0 - return - } - - for i := 0; i < int(length); i++ { - depth[i] = 0 - } - { - var max_tree_size uint = 2*length + 1 - var tree []huffmanTree = make([]huffmanTree, max_tree_size) - var count_limit uint32 - for count_limit = 1; ; count_limit *= 2 { - var node int = 0 - var l uint - for l = length; l != 0; { - l-- - if histogram[l] != 0 { - if histogram[l] >= count_limit { - initHuffmanTree(&tree[node:][0], histogram[l], -1, int16(l)) - } else { - initHuffmanTree(&tree[node:][0], count_limit, -1, int16(l)) - } - - node++ - } - } - { - var n int = node - /* Points to the next leaf 
node. */ /* Points to the next non-leaf node. */ - var sentinel huffmanTree - var i int = 0 - var j int = n + 1 - var k int - - sortHuffmanTreeItems(tree, uint(n), huffmanTreeComparator(sortHuffmanTree1)) - - /* The nodes are: - [0, n): the sorted leaf nodes that we start with. - [n]: we add a sentinel here. - [n + 1, 2n): new parent nodes are added here, starting from - (n+1). These are naturally in ascending order. - [2n]: we add a sentinel at the end as well. - There will be (2n+1) elements at the end. */ - initHuffmanTree(&sentinel, math.MaxUint32, -1, -1) - - tree[node] = sentinel - node++ - tree[node] = sentinel - node++ - - for k = n - 1; k > 0; k-- { - var left int - var right int - if tree[i].total_count_ <= tree[j].total_count_ { - left = i - i++ - } else { - left = j - j++ - } - - if tree[i].total_count_ <= tree[j].total_count_ { - right = i - i++ - } else { - right = j - j++ - } - - /* The sentinel node becomes the parent node. */ - tree[node-1].total_count_ = tree[left].total_count_ + tree[right].total_count_ - - tree[node-1].index_left_ = int16(left) - tree[node-1].index_right_or_value_ = int16(right) - - /* Add back the last sentinel node. */ - tree[node] = sentinel - node++ - } - - if setDepth(2*n-1, tree, depth, 14) { - /* We need to pack the Huffman tree in 14 bits. If this was not - successful, add fake entities to the lowest values and retry. */ - break - } - } - } - - tree = nil - } - - convertBitDepthsToSymbols(depth, length, bits) - if count <= 4 { - var i uint - - /* value of 1 indicates a simple Huffman code */ - writeBits(2, 1, storage_ix, storage) - - writeBits(2, uint64(count)-1, storage_ix, storage) /* NSYM - 1 */ - - /* Sort */ - for i = 0; i < count; i++ { - var j uint - for j = i + 1; j < count; j++ { - if depth[symbols[j]] < depth[symbols[i]] { - var tmp uint = symbols[j] - symbols[j] = symbols[i] - symbols[i] = tmp - } - } - } - - if count == 2 { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - } else if count == 3 { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[2]), storage_ix, storage) - } else { - writeBits(max_bits, uint64(symbols[0]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[1]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[2]), storage_ix, storage) - writeBits(max_bits, uint64(symbols[3]), storage_ix, storage) - - /* tree-select */ - var tmp int - if depth[symbols[0]] == 1 { - tmp = 1 - } else { - tmp = 0 - } - writeBits(1, uint64(tmp), storage_ix, storage) - } - } else { - var previous_value byte = 8 - var i uint - - /* Complex Huffman Tree */ - storeStaticCodeLengthCode(storage_ix, storage) - - /* Actual RLE coding. 
*/ - for i = 0; i < length; { - var value byte = depth[i] - var reps uint = 1 - var k uint - for k = i + 1; k < length && depth[k] == value; k++ { - reps++ - } - - i += reps - if value == 0 { - writeBits(uint(kZeroRepsDepth[reps]), kZeroRepsBits[reps], storage_ix, storage) - } else { - if previous_value != value { - writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value]), storage_ix, storage) - reps-- - } - - if reps < 3 { - for reps != 0 { - reps-- - writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value]), storage_ix, storage) - } - } else { - reps -= 3 - writeBits(uint(kNonZeroRepsDepth[reps]), kNonZeroRepsBits[reps], storage_ix, storage) - } - - previous_value = value - } - } - } -} - -func indexOf(v []byte, v_size uint, value byte) uint { - var i uint = 0 - for ; i < v_size; i++ { - if v[i] == value { - return i - } - } - - return i -} - -func moveToFront(v []byte, index uint) { - var value byte = v[index] - var i uint - for i = index; i != 0; i-- { - v[i] = v[i-1] - } - - v[0] = value -} - -func moveToFrontTransform(v_in []uint32, v_size uint, v_out []uint32) { - var i uint - var mtf [256]byte - var max_value uint32 - if v_size == 0 { - return - } - - max_value = v_in[0] - for i = 1; i < v_size; i++ { - if v_in[i] > max_value { - max_value = v_in[i] - } - } - - assert(max_value < 256) - for i = 0; uint32(i) <= max_value; i++ { - mtf[i] = byte(i) - } - { - var mtf_size uint = uint(max_value + 1) - for i = 0; i < v_size; i++ { - var index uint = indexOf(mtf[:], mtf_size, byte(v_in[i])) - assert(index < mtf_size) - v_out[i] = uint32(index) - moveToFront(mtf[:], index) - } - } -} - -/* Finds runs of zeros in v[0..in_size) and replaces them with a prefix code of - the run length plus extra bits (lower 9 bits is the prefix code and the rest - are the extra bits). Non-zero values in v[] are shifted by - *max_length_prefix. Will not create prefix codes bigger than the initial - value of *max_run_length_prefix. The prefix code of run length L is simply - Log2Floor(L) and the number of extra bits is the same as the prefix code. 
*/ -func runLengthCodeZeros(in_size uint, v []uint32, out_size *uint, max_run_length_prefix *uint32) { - var max_reps uint32 = 0 - var i uint - var max_prefix uint32 - for i = 0; i < in_size; { - var reps uint32 = 0 - for ; i < in_size && v[i] != 0; i++ { - } - for ; i < in_size && v[i] == 0; i++ { - reps++ - } - - max_reps = brotli_max_uint32_t(reps, max_reps) - } - - if max_reps > 0 { - max_prefix = log2FloorNonZero(uint(max_reps)) - } else { - max_prefix = 0 - } - max_prefix = brotli_min_uint32_t(max_prefix, *max_run_length_prefix) - *max_run_length_prefix = max_prefix - *out_size = 0 - for i = 0; i < in_size; { - assert(*out_size <= i) - if v[i] != 0 { - v[*out_size] = v[i] + *max_run_length_prefix - i++ - (*out_size)++ - } else { - var reps uint32 = 1 - var k uint - for k = i + 1; k < in_size && v[k] == 0; k++ { - reps++ - } - - i += uint(reps) - for reps != 0 { - if reps < 2< 0) - writeSingleBit(use_rle, storage_ix, storage) - if use_rle { - writeBits(4, uint64(max_run_length_prefix)-1, storage_ix, storage) - } - } - - buildAndStoreHuffmanTree(histogram[:], uint(uint32(num_clusters)+max_run_length_prefix), uint(uint32(num_clusters)+max_run_length_prefix), tree, depths[:], bits[:], storage_ix, storage) - for i = 0; i < num_rle_symbols; i++ { - var rle_symbol uint32 = rle_symbols[i] & encodeContextMap_kSymbolMask - var extra_bits_val uint32 = rle_symbols[i] >> symbolBits - writeBits(uint(depths[rle_symbol]), uint64(bits[rle_symbol]), storage_ix, storage) - if rle_symbol > 0 && rle_symbol <= max_run_length_prefix { - writeBits(uint(rle_symbol), uint64(extra_bits_val), storage_ix, storage) - } - } - - writeBits(1, 1, storage_ix, storage) /* use move-to-front */ - rle_symbols = nil -} - -/* Stores the block switch command with index block_ix to the bit stream. */ -func storeBlockSwitch(code *blockSplitCode, block_len uint32, block_type byte, is_first_block bool, storage_ix *uint, storage []byte) { - var typecode uint = nextBlockTypeCode(&code.type_code_calculator, block_type) - var lencode uint - var len_nextra uint32 - var len_extra uint32 - if !is_first_block { - writeBits(uint(code.type_depths[typecode]), uint64(code.type_bits[typecode]), storage_ix, storage) - } - - getBlockLengthPrefixCode(block_len, &lencode, &len_nextra, &len_extra) - - writeBits(uint(code.length_depths[lencode]), uint64(code.length_bits[lencode]), storage_ix, storage) - writeBits(uint(len_nextra), uint64(len_extra), storage_ix, storage) -} - -/* Builds a BlockSplitCode data structure from the block split given by the - vector of block types and block lengths and stores it to the bit stream. */ -func buildAndStoreBlockSplitCode(types []byte, lengths []uint32, num_blocks uint, num_types uint, tree []huffmanTree, code *blockSplitCode, storage_ix *uint, storage []byte) { - var type_histo [maxBlockTypeSymbols]uint32 - var length_histo [numBlockLenSymbols]uint32 - var i uint - var type_code_calculator blockTypeCodeCalculator - for i := 0; i < int(num_types+2); i++ { - type_histo[i] = 0 - } - length_histo = [numBlockLenSymbols]uint32{} - initBlockTypeCodeCalculator(&type_code_calculator) - for i = 0; i < num_blocks; i++ { - var type_code uint = nextBlockTypeCode(&type_code_calculator, types[i]) - if i != 0 { - type_histo[type_code]++ - } - length_histo[blockLengthPrefixCode(lengths[i])]++ - } - - storeVarLenUint8(num_types-1, storage_ix, storage) - if num_types > 1 { /* TODO: else? could StoreBlockSwitch occur? 
*/ - buildAndStoreHuffmanTree(type_histo[0:], num_types+2, num_types+2, tree, code.type_depths[0:], code.type_bits[0:], storage_ix, storage) - buildAndStoreHuffmanTree(length_histo[0:], numBlockLenSymbols, numBlockLenSymbols, tree, code.length_depths[0:], code.length_bits[0:], storage_ix, storage) - storeBlockSwitch(code, lengths[0], types[0], true, storage_ix, storage) - } -} - -/* Stores a context map where the histogram type is always the block type. */ -func storeTrivialContextMap(num_types uint, context_bits uint, tree []huffmanTree, storage_ix *uint, storage []byte) { - storeVarLenUint8(num_types-1, storage_ix, storage) - if num_types > 1 { - var repeat_code uint = context_bits - 1 - var repeat_bits uint = (1 << repeat_code) - 1 - var alphabet_size uint = num_types + repeat_code - var histogram [maxContextMapSymbols]uint32 - var depths [maxContextMapSymbols]byte - var bits [maxContextMapSymbols]uint16 - var i uint - for i := 0; i < int(alphabet_size); i++ { - histogram[i] = 0 - } - - /* Write RLEMAX. */ - writeBits(1, 1, storage_ix, storage) - - writeBits(4, uint64(repeat_code)-1, storage_ix, storage) - histogram[repeat_code] = uint32(num_types) - histogram[0] = 1 - for i = context_bits; i < alphabet_size; i++ { - histogram[i] = 1 - } - - buildAndStoreHuffmanTree(histogram[:], alphabet_size, alphabet_size, tree, depths[:], bits[:], storage_ix, storage) - for i = 0; i < num_types; i++ { - var tmp uint - if i == 0 { - tmp = 0 - } else { - tmp = i + context_bits - 1 - } - var code uint = tmp - writeBits(uint(depths[code]), uint64(bits[code]), storage_ix, storage) - writeBits(uint(depths[repeat_code]), uint64(bits[repeat_code]), storage_ix, storage) - writeBits(repeat_code, uint64(repeat_bits), storage_ix, storage) - } - - /* Write IMTF (inverse-move-to-front) bit. */ - writeBits(1, 1, storage_ix, storage) - } -} - -/* Manages the encoding of one block category (literal, command or distance). */ -type blockEncoder struct { - histogram_length_ uint - num_block_types_ uint - block_types_ []byte - block_lengths_ []uint32 - num_blocks_ uint - block_split_code_ blockSplitCode - block_ix_ uint - block_len_ uint - entropy_ix_ uint - depths_ []byte - bits_ []uint16 -} - -func initBlockEncoder(self *blockEncoder, histogram_length uint, num_block_types uint, block_types []byte, block_lengths []uint32, num_blocks uint) { - self.histogram_length_ = histogram_length - self.num_block_types_ = num_block_types - self.block_types_ = block_types - self.block_lengths_ = block_lengths - self.num_blocks_ = num_blocks - initBlockTypeCodeCalculator(&self.block_split_code_.type_code_calculator) - self.block_ix_ = 0 - if num_blocks == 0 { - self.block_len_ = 0 - } else { - self.block_len_ = uint(block_lengths[0]) - } - self.entropy_ix_ = 0 - self.depths_ = nil - self.bits_ = nil -} - -func cleanupBlockEncoder(self *blockEncoder) { - self.depths_ = nil - self.bits_ = nil -} - -/* Creates entropy codes of block lengths and block types and stores them - to the bit stream. */ -func buildAndStoreBlockSwitchEntropyCodes(self *blockEncoder, tree []huffmanTree, storage_ix *uint, storage []byte) { - buildAndStoreBlockSplitCode(self.block_types_, self.block_lengths_, self.num_blocks_, self.num_block_types_, tree, &self.block_split_code_, storage_ix, storage) -} - -/* Stores the next symbol with the entropy code of the current block type. - Updates the block type and block length at block boundaries. 
*/ -func storeSymbol(self *blockEncoder, symbol uint, storage_ix *uint, storage []byte) { - if self.block_len_ == 0 { - self.block_ix_++ - var block_ix uint = self.block_ix_ - var block_len uint32 = self.block_lengths_[block_ix] - var block_type byte = self.block_types_[block_ix] - self.block_len_ = uint(block_len) - self.entropy_ix_ = uint(block_type) * self.histogram_length_ - storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, storage_ix, storage) - } - - self.block_len_-- - { - var ix uint = self.entropy_ix_ + symbol - writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix]), storage_ix, storage) - } -} - -/* Stores the next symbol with the entropy code of the current block type and - context value. - Updates the block type and block length at block boundaries. */ -func storeSymbolWithContext(self *blockEncoder, symbol uint, context uint, context_map []uint32, storage_ix *uint, storage []byte, context_bits uint) { - if self.block_len_ == 0 { - self.block_ix_++ - var block_ix uint = self.block_ix_ - var block_len uint32 = self.block_lengths_[block_ix] - var block_type byte = self.block_types_[block_ix] - self.block_len_ = uint(block_len) - self.entropy_ix_ = uint(block_type) << context_bits - storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, storage_ix, storage) - } - - self.block_len_-- - { - var histo_ix uint = uint(context_map[self.entropy_ix_+context]) - var ix uint = histo_ix*self.histogram_length_ + symbol - writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix]), storage_ix, storage) - } -} - -func buildAndStoreEntropyCodesLiteral(self *blockEncoder, histograms []histogramLiteral, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) { - var table_size uint = histograms_size * self.histogram_length_ - self.depths_ = make([]byte, table_size) - self.bits_ = make([]uint16, table_size) - { - var i uint - for i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage) - } - } -} - -func buildAndStoreEntropyCodesCommand(self *blockEncoder, histograms []histogramCommand, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) { - var table_size uint = histograms_size * self.histogram_length_ - self.depths_ = make([]byte, table_size) - self.bits_ = make([]uint16, table_size) - { - var i uint - for i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage) - } - } -} - -func buildAndStoreEntropyCodesDistance(self *blockEncoder, histograms []histogramDistance, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) { - var table_size uint = histograms_size * self.histogram_length_ - self.depths_ = make([]byte, table_size) - self.bits_ = make([]uint16, table_size) - { - var i uint - for i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage) - } - } -} - -func jumpToByteBoundary(storage_ix *uint, storage []byte) { - *storage_ix = (*storage_ix + 7) &^ 7 - storage[*storage_ix>>3] = 0 -} - -func storeMetaBlock(input []byte, 
start_pos uint, length uint, mask uint, prev_byte byte, prev_byte2 byte, is_last bool, params *encoderParams, literal_context_mode int, commands []command, n_commands uint, mb *metaBlockSplit, storage_ix *uint, storage []byte) { - var pos uint = start_pos - var i uint - var num_distance_symbols uint32 = params.dist.alphabet_size - var num_effective_distance_symbols uint32 = num_distance_symbols - var tree []huffmanTree - var literal_context_lut contextLUT = getContextLUT(literal_context_mode) - var literal_enc blockEncoder - var command_enc blockEncoder - var distance_enc blockEncoder - var dist *distanceParams = ¶ms.dist - if params.large_window && num_effective_distance_symbols > numHistogramDistanceSymbols { - num_effective_distance_symbols = numHistogramDistanceSymbols - } - - storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage) - - tree = make([]huffmanTree, maxHuffmanTreeSize) - initBlockEncoder(&literal_enc, numLiteralSymbols, mb.literal_split.num_types, mb.literal_split.types, mb.literal_split.lengths, mb.literal_split.num_blocks) - initBlockEncoder(&command_enc, numCommandSymbols, mb.command_split.num_types, mb.command_split.types, mb.command_split.lengths, mb.command_split.num_blocks) - initBlockEncoder(&distance_enc, uint(num_effective_distance_symbols), mb.distance_split.num_types, mb.distance_split.types, mb.distance_split.lengths, mb.distance_split.num_blocks) - - buildAndStoreBlockSwitchEntropyCodes(&literal_enc, tree, storage_ix, storage) - buildAndStoreBlockSwitchEntropyCodes(&command_enc, tree, storage_ix, storage) - buildAndStoreBlockSwitchEntropyCodes(&distance_enc, tree, storage_ix, storage) - - writeBits(2, uint64(dist.distance_postfix_bits), storage_ix, storage) - writeBits(4, uint64(dist.num_direct_distance_codes)>>dist.distance_postfix_bits, storage_ix, storage) - for i = 0; i < mb.literal_split.num_types; i++ { - writeBits(2, uint64(literal_context_mode), storage_ix, storage) - } - - if mb.literal_context_map_size == 0 { - storeTrivialContextMap(mb.literal_histograms_size, literalContextBits, tree, storage_ix, storage) - } else { - encodeContextMap(mb.literal_context_map, mb.literal_context_map_size, mb.literal_histograms_size, tree, storage_ix, storage) - } - - if mb.distance_context_map_size == 0 { - storeTrivialContextMap(mb.distance_histograms_size, distanceContextBits, tree, storage_ix, storage) - } else { - encodeContextMap(mb.distance_context_map, mb.distance_context_map_size, mb.distance_histograms_size, tree, storage_ix, storage) - } - - buildAndStoreEntropyCodesLiteral(&literal_enc, mb.literal_histograms, mb.literal_histograms_size, numLiteralSymbols, tree, storage_ix, storage) - buildAndStoreEntropyCodesCommand(&command_enc, mb.command_histograms, mb.command_histograms_size, numCommandSymbols, tree, storage_ix, storage) - buildAndStoreEntropyCodesDistance(&distance_enc, mb.distance_histograms, mb.distance_histograms_size, uint(num_distance_symbols), tree, storage_ix, storage) - tree = nil - - for i = 0; i < n_commands; i++ { - var cmd command = commands[i] - var cmd_code uint = uint(cmd.cmd_prefix_) - storeSymbol(&command_enc, cmd_code, storage_ix, storage) - storeCommandExtra(&cmd, storage_ix, storage) - if mb.literal_context_map_size == 0 { - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - storeSymbol(&literal_enc, uint(input[pos&mask]), storage_ix, storage) - pos++ - } - } else { - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - var context uint = uint(getContext(prev_byte, prev_byte2, literal_context_lut)) - 
var literal byte = input[pos&mask] - storeSymbolWithContext(&literal_enc, uint(literal), context, mb.literal_context_map, storage_ix, storage, literalContextBits) - prev_byte2 = prev_byte - prev_byte = literal - pos++ - } - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 { - prev_byte2 = input[(pos-2)&mask] - prev_byte = input[(pos-1)&mask] - if cmd.cmd_prefix_ >= 128 { - var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF - var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10 - var distextra uint64 = uint64(cmd.dist_extra_) - if mb.distance_context_map_size == 0 { - storeSymbol(&distance_enc, dist_code, storage_ix, storage) - } else { - var context uint = uint(commandDistanceContext(&cmd)) - storeSymbolWithContext(&distance_enc, dist_code, context, mb.distance_context_map, storage_ix, storage, distanceContextBits) - } - - writeBits(uint(distnumextra), distextra, storage_ix, storage) - } - } - } - - cleanupBlockEncoder(&distance_enc) - cleanupBlockEncoder(&command_enc) - cleanupBlockEncoder(&literal_enc) - if is_last { - jumpToByteBoundary(storage_ix, storage) - } -} - -func buildHistograms(input []byte, start_pos uint, mask uint, commands []command, n_commands uint, lit_histo *histogramLiteral, cmd_histo *histogramCommand, dist_histo *histogramDistance) { - var pos uint = start_pos - var i uint - for i = 0; i < n_commands; i++ { - var cmd command = commands[i] - var j uint - histogramAddCommand(cmd_histo, uint(cmd.cmd_prefix_)) - for j = uint(cmd.insert_len_); j != 0; j-- { - histogramAddLiteral(lit_histo, uint(input[pos&mask])) - pos++ - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 { - histogramAddDistance(dist_histo, uint(cmd.dist_prefix_)&0x3FF) - } - } -} - -func storeDataWithHuffmanCodes(input []byte, start_pos uint, mask uint, commands []command, n_commands uint, lit_depth []byte, lit_bits []uint16, cmd_depth []byte, cmd_bits []uint16, dist_depth []byte, dist_bits []uint16, storage_ix *uint, storage []byte) { - var pos uint = start_pos - var i uint - for i = 0; i < n_commands; i++ { - var cmd command = commands[i] - var cmd_code uint = uint(cmd.cmd_prefix_) - var j uint - writeBits(uint(cmd_depth[cmd_code]), uint64(cmd_bits[cmd_code]), storage_ix, storage) - storeCommandExtra(&cmd, storage_ix, storage) - for j = uint(cmd.insert_len_); j != 0; j-- { - var literal byte = input[pos&mask] - writeBits(uint(lit_depth[literal]), uint64(lit_bits[literal]), storage_ix, storage) - pos++ - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 { - var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF - var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10 - var distextra uint32 = cmd.dist_extra_ - writeBits(uint(dist_depth[dist_code]), uint64(dist_bits[dist_code]), storage_ix, storage) - writeBits(uint(distnumextra), uint64(distextra), storage_ix, storage) - } - } -} - -func storeMetaBlockTrivial(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, n_commands uint, storage_ix *uint, storage []byte) { - var lit_histo histogramLiteral - var cmd_histo histogramCommand - var dist_histo histogramDistance - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - var cmd_depth [numCommandSymbols]byte - var cmd_bits [numCommandSymbols]uint16 - var dist_depth [maxSimpleDistanceAlphabetSize]byte - var dist_bits [maxSimpleDistanceAlphabetSize]uint16 - var tree []huffmanTree - var num_distance_symbols 
uint32 = params.dist.alphabet_size - - storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage) - - histogramClearLiteral(&lit_histo) - histogramClearCommand(&cmd_histo) - histogramClearDistance(&dist_histo) - - buildHistograms(input, start_pos, mask, commands, n_commands, &lit_histo, &cmd_histo, &dist_histo) - - writeBits(13, 0, storage_ix, storage) - - tree = make([]huffmanTree, maxHuffmanTreeSize) - buildAndStoreHuffmanTree(lit_histo.data_[:], numLiteralSymbols, numLiteralSymbols, tree, lit_depth[:], lit_bits[:], storage_ix, storage) - buildAndStoreHuffmanTree(cmd_histo.data_[:], numCommandSymbols, numCommandSymbols, tree, cmd_depth[:], cmd_bits[:], storage_ix, storage) - buildAndStoreHuffmanTree(dist_histo.data_[:], maxSimpleDistanceAlphabetSize, uint(num_distance_symbols), tree, dist_depth[:], dist_bits[:], storage_ix, storage) - tree = nil - storeDataWithHuffmanCodes(input, start_pos, mask, commands, n_commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], storage_ix, storage) - if is_last { - jumpToByteBoundary(storage_ix, storage) - } -} - -func storeMetaBlockFast(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, n_commands uint, storage_ix *uint, storage []byte) { - var num_distance_symbols uint32 = params.dist.alphabet_size - var distance_alphabet_bits uint32 = log2FloorNonZero(uint(num_distance_symbols-1)) + 1 - - storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage) - - writeBits(13, 0, storage_ix, storage) - - if n_commands <= 128 { - var histogram = [numLiteralSymbols]uint32{0} - var pos uint = start_pos - var num_literals uint = 0 - var i uint - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - for i = 0; i < n_commands; i++ { - var cmd command = commands[i] - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - histogram[input[pos&mask]]++ - pos++ - } - - num_literals += uint(cmd.insert_len_) - pos += uint(commandCopyLen(&cmd)) - } - - buildAndStoreHuffmanTreeFast(histogram[:], num_literals, /* max_bits = */ - 8, lit_depth[:], lit_bits[:], storage_ix, storage) - - storeStaticCommandHuffmanTree(storage_ix, storage) - storeStaticDistanceHuffmanTree(storage_ix, storage) - storeDataWithHuffmanCodes(input, start_pos, mask, commands, n_commands, lit_depth[:], lit_bits[:], kStaticCommandCodeDepth[:], kStaticCommandCodeBits[:], kStaticDistanceCodeDepth[:], kStaticDistanceCodeBits[:], storage_ix, storage) - } else { - var lit_histo histogramLiteral - var cmd_histo histogramCommand - var dist_histo histogramDistance - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - var cmd_depth [numCommandSymbols]byte - var cmd_bits [numCommandSymbols]uint16 - var dist_depth [maxSimpleDistanceAlphabetSize]byte - var dist_bits [maxSimpleDistanceAlphabetSize]uint16 - histogramClearLiteral(&lit_histo) - histogramClearCommand(&cmd_histo) - histogramClearDistance(&dist_histo) - buildHistograms(input, start_pos, mask, commands, n_commands, &lit_histo, &cmd_histo, &dist_histo) - buildAndStoreHuffmanTreeFast(lit_histo.data_[:], lit_histo.total_count_, /* max_bits = */ - 8, lit_depth[:], lit_bits[:], storage_ix, storage) - - buildAndStoreHuffmanTreeFast(cmd_histo.data_[:], cmd_histo.total_count_, /* max_bits = */ - 10, cmd_depth[:], cmd_bits[:], storage_ix, storage) - - buildAndStoreHuffmanTreeFast(dist_histo.data_[:], dist_histo.total_count_, /* max_bits = */ - uint(distance_alphabet_bits), dist_depth[:], 
dist_bits[:], storage_ix, storage) - - storeDataWithHuffmanCodes(input, start_pos, mask, commands, n_commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], storage_ix, storage) - } - - if is_last { - jumpToByteBoundary(storage_ix, storage) - } -} - -/* This is for storing uncompressed blocks (simple raw storage of - bytes-as-bytes). */ -func storeUncompressedMetaBlock(is_final_block bool, input []byte, position uint, mask uint, len uint, storage_ix *uint, storage []byte) { - var masked_pos uint = position & mask - storeUncompressedMetaBlockHeader(uint(len), storage_ix, storage) - jumpToByteBoundary(storage_ix, storage) - - if masked_pos+len > mask+1 { - var len1 uint = mask + 1 - masked_pos - copy(storage[*storage_ix>>3:], input[masked_pos:][:len1]) - *storage_ix += len1 << 3 - len -= len1 - masked_pos = 0 - } - - copy(storage[*storage_ix>>3:], input[masked_pos:][:len]) - *storage_ix += uint(len << 3) - - /* We need to clear the next 4 bytes to continue to be - compatible with BrotliWriteBits. */ - writeBitsPrepareStorage(*storage_ix, storage) - - /* Since the uncompressed block itself may not be the final block, add an - empty one after this. */ - if is_final_block { - writeBits(1, 1, storage_ix, storage) /* islast */ - writeBits(1, 1, storage_ix, storage) /* isempty */ - jumpToByteBoundary(storage_ix, storage) - } -} diff --git a/vendor/github.com/andybalholm/brotli/cluster.go b/vendor/github.com/andybalholm/brotli/cluster.go deleted file mode 100644 index df8a3282245..00000000000 --- a/vendor/github.com/andybalholm/brotli/cluster.go +++ /dev/null @@ -1,30 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions for clustering similar histograms together. */ - -type histogramPair struct { - idx1 uint32 - idx2 uint32 - cost_combo float64 - cost_diff float64 -} - -func histogramPairIsLess(p1 *histogramPair, p2 *histogramPair) bool { - if p1.cost_diff != p2.cost_diff { - return p1.cost_diff > p2.cost_diff - } - - return (p1.idx2 - p1.idx1) > (p2.idx2 - p2.idx1) -} - -/* Returns entropy reduction of the context map when we combine two clusters. */ -func clusterCostDiff(size_a uint, size_b uint) float64 { - var size_c uint = size_a + size_b - return float64(size_a)*fastLog2(size_a) + float64(size_b)*fastLog2(size_b) - float64(size_c)*fastLog2(size_c) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_command.go b/vendor/github.com/andybalholm/brotli/cluster_command.go deleted file mode 100644 index 7449751b210..00000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_command.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. 
*/ -func compareAndPushToQueueCommand(out []histogramCommand, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramCommand = out[idx1] - var cost_combo float64 - histogramAddHistogramCommand(&combo, &out[idx2]) - cost_combo = populationCostCommand(&combo) - if cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff += p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineCommand(out []histogramCommand, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueCommand(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. */ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramCommand(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. */ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. 
*/ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueCommand(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceCommand(histogram *histogramCommand, candidate *histogramCommand) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramCommand = *histogram - histogramAddHistogramCommand(&tmp, candidate) - return populationCostCommand(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. - Note: we assume that out[]->bit_cost_ is already up-to-date. */ -func histogramRemapCommand(in []histogramCommand, in_size uint, clusters []uint32, num_clusters uint, out []histogramCommand, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceCommand(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceCommand(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearCommand(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramCommand(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. */ - -var histogramReindexCommand_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexCommand(out []histogramCommand, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramCommand - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexCommand_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexCommand_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. 
*/ - tmp = make([]histogramCommand, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsCommand(in []histogramCommand, in_size uint, max_histograms uint, out []histogramCommand, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. */ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostCommand(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineCommand(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineCommand(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapCommand(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. */ - *out_size = histogramReindexCommand(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_distance.go b/vendor/github.com/andybalholm/brotli/cluster_distance.go deleted file mode 100644 index 1aaa86e6ed8..00000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_distance.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. 
*/ -func compareAndPushToQueueDistance(out []histogramDistance, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramDistance = out[idx1] - var cost_combo float64 - histogramAddHistogramDistance(&combo, &out[idx2]) - cost_combo = populationCostDistance(&combo) - if cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff += p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineDistance(out []histogramDistance, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueDistance(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. */ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramDistance(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. */ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. 
*/ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueDistance(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceDistance(histogram *histogramDistance, candidate *histogramDistance) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramDistance = *histogram - histogramAddHistogramDistance(&tmp, candidate) - return populationCostDistance(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. - Note: we assume that out[]->bit_cost_ is already up-to-date. */ -func histogramRemapDistance(in []histogramDistance, in_size uint, clusters []uint32, num_clusters uint, out []histogramDistance, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearDistance(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramDistance(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. */ - -var histogramReindexDistance_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexDistance(out []histogramDistance, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramDistance - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexDistance_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexDistance_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. 
*/ - tmp = make([]histogramDistance, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsDistance(in []histogramDistance, in_size uint, max_histograms uint, out []histogramDistance, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. */ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostDistance(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapDistance(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. */ - *out_size = histogramReindexDistance(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_literal.go b/vendor/github.com/andybalholm/brotli/cluster_literal.go deleted file mode 100644 index 6ba66f31b2c..00000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_literal.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. 
*/ -func compareAndPushToQueueLiteral(out []histogramLiteral, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramLiteral = out[idx1] - var cost_combo float64 - histogramAddHistogramLiteral(&combo, &out[idx2]) - cost_combo = populationCostLiteral(&combo) - if cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff += p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineLiteral(out []histogramLiteral, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueLiteral(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. */ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramLiteral(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. */ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. 
*/ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueLiteral(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceLiteral(histogram *histogramLiteral, candidate *histogramLiteral) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramLiteral = *histogram - histogramAddHistogramLiteral(&tmp, candidate) - return populationCostLiteral(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. - Note: we assume that out[]->bit_cost_ is already up-to-date. */ -func histogramRemapLiteral(in []histogramLiteral, in_size uint, clusters []uint32, num_clusters uint, out []histogramLiteral, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearLiteral(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramLiteral(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. */ - -var histogramReindexLiteral_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexLiteral(out []histogramLiteral, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramLiteral - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexLiteral_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexLiteral_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. 
*/ - tmp = make([]histogramLiteral, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsLiteral(in []histogramLiteral, in_size uint, max_histograms uint, out []histogramLiteral, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. */ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostLiteral(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapLiteral(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. 
*/ - *out_size = histogramReindexLiteral(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/command.go b/vendor/github.com/andybalholm/brotli/command.go deleted file mode 100644 index e93ccdfa1e6..00000000000 --- a/vendor/github.com/andybalholm/brotli/command.go +++ /dev/null @@ -1,252 +0,0 @@ -package brotli - -var kInsBase = []uint32{ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 8, - 10, - 14, - 18, - 26, - 34, - 50, - 66, - 98, - 130, - 194, - 322, - 578, - 1090, - 2114, - 6210, - 22594, -} - -var kInsExtra = []uint32{ - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 12, - 14, - 24, -} - -var kCopyBase = []uint32{ - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 12, - 14, - 18, - 22, - 30, - 38, - 54, - 70, - 102, - 134, - 198, - 326, - 582, - 1094, - 2118, -} - -var kCopyExtra = []uint32{ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 24, -} - -func getInsertLengthCode(insertlen uint) uint16 { - if insertlen < 6 { - return uint16(insertlen) - } else if insertlen < 130 { - var nbits uint32 = log2FloorNonZero(insertlen-2) - 1 - return uint16((nbits << 1) + uint32((insertlen-2)>>nbits) + 2) - } else if insertlen < 2114 { - return uint16(log2FloorNonZero(insertlen-66) + 10) - } else if insertlen < 6210 { - return 21 - } else if insertlen < 22594 { - return 22 - } else { - return 23 - } -} - -func getCopyLengthCode(copylen uint) uint16 { - if copylen < 10 { - return uint16(copylen - 2) - } else if copylen < 134 { - var nbits uint32 = log2FloorNonZero(copylen-6) - 1 - return uint16((nbits << 1) + uint32((copylen-6)>>nbits) + 4) - } else if copylen < 2118 { - return uint16(log2FloorNonZero(copylen-70) + 12) - } else { - return 23 - } -} - -func combineLengthCodes(inscode uint16, copycode uint16, use_last_distance bool) uint16 { - var bits64 uint16 = uint16(copycode&0x7 | (inscode&0x7)<<3) - if use_last_distance && inscode < 8 && copycode < 16 { - if copycode < 8 { - return bits64 - } else { - return bits64 | 64 - } - } else { - /* Specification: 5 Encoding of ... (last table) */ - /* offset = 2 * index, where index is in range [0..8] */ - var offset uint32 = 2 * ((uint32(copycode) >> 3) + 3*(uint32(inscode)>>3)) - - /* All values in specification are K * 64, - where K = [2, 3, 6, 4, 5, 8, 7, 9, 10], - i + 1 = [1, 2, 3, 4, 5, 6, 7, 8, 9], - K - i - 1 = [1, 1, 3, 0, 0, 2, 0, 1, 2] = D. - All values in D require only 2 bits to encode. - Magic constant is shifted 6 bits left, to avoid final multiplication. */ - offset = (offset << 5) + 0x40 + ((0x520D40 >> offset) & 0xC0) - - return uint16(offset | uint32(bits64)) - } -} - -func getLengthCode(insertlen uint, copylen uint, use_last_distance bool, code *uint16) { - var inscode uint16 = getInsertLengthCode(insertlen) - var copycode uint16 = getCopyLengthCode(copylen) - *code = combineLengthCodes(inscode, copycode, use_last_distance) -} - -func getInsertBase(inscode uint16) uint32 { - return kInsBase[inscode] -} - -func getInsertExtra(inscode uint16) uint32 { - return kInsExtra[inscode] -} - -func getCopyBase(copycode uint16) uint32 { - return kCopyBase[copycode] -} - -func getCopyExtra(copycode uint16) uint32 { - return kCopyExtra[copycode] -} - -type command struct { - insert_len_ uint32 - copy_len_ uint32 - dist_extra_ uint32 - cmd_prefix_ uint16 - dist_prefix_ uint16 -} - -/* distance_code is e.g. 0 for same-as-last short code, or 16 for offset 1. 
*/ -func initCommand(self *command, dist *distanceParams, insertlen uint, copylen uint, copylen_code_delta int, distance_code uint) { - /* Don't rely on signed int representation, use honest casts. */ - var delta uint32 = uint32(byte(int8(copylen_code_delta))) - self.insert_len_ = uint32(insertlen) - self.copy_len_ = uint32(uint32(copylen) | delta<<25) - - /* The distance prefix and extra bits are stored in this Command as if - npostfix and ndirect were 0, they are only recomputed later after the - clustering if needed. */ - prefixEncodeCopyDistance(distance_code, uint(dist.num_direct_distance_codes), uint(dist.distance_postfix_bits), &self.dist_prefix_, &self.dist_extra_) - - getLengthCode(insertlen, uint(int(copylen)+copylen_code_delta), (self.dist_prefix_&0x3FF == 0), &self.cmd_prefix_) -} - -func initInsertCommand(self *command, insertlen uint) { - self.insert_len_ = uint32(insertlen) - self.copy_len_ = 4 << 25 - self.dist_extra_ = 0 - self.dist_prefix_ = numDistanceShortCodes - getLengthCode(insertlen, 4, false, &self.cmd_prefix_) -} - -func commandRestoreDistanceCode(self *command, dist *distanceParams) uint32 { - if uint32(self.dist_prefix_&0x3FF) < numDistanceShortCodes+dist.num_direct_distance_codes { - return uint32(self.dist_prefix_) & 0x3FF - } else { - var dcode uint32 = uint32(self.dist_prefix_) & 0x3FF - var nbits uint32 = uint32(self.dist_prefix_) >> 10 - var extra uint32 = self.dist_extra_ - var postfix_mask uint32 = (1 << dist.distance_postfix_bits) - 1 - var hcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) >> dist.distance_postfix_bits - var lcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) & postfix_mask - var offset uint32 = ((2 + (hcode & 1)) << nbits) - 4 - return ((offset + extra) << dist.distance_postfix_bits) + lcode + dist.num_direct_distance_codes + numDistanceShortCodes - } -} - -func commandDistanceContext(self *command) uint32 { - var r uint32 = uint32(self.cmd_prefix_) >> 6 - var c uint32 = uint32(self.cmd_prefix_) & 7 - if (r == 0 || r == 2 || r == 4 || r == 7) && (c <= 2) { - return c - } - - return 3 -} - -func commandCopyLen(self *command) uint32 { - return self.copy_len_ & 0x1FFFFFF -} - -func commandCopyLenCode(self *command) uint32 { - var modifier uint32 = self.copy_len_ >> 25 - var delta int32 = int32(int8(byte(modifier | (modifier&0x40)<<1))) - return uint32(int32(self.copy_len_&0x1FFFFFF) + delta) -} diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment.go b/vendor/github.com/andybalholm/brotli/compress_fragment.go deleted file mode 100644 index 435898e1651..00000000000 --- a/vendor/github.com/andybalholm/brotli/compress_fragment.go +++ /dev/null @@ -1,840 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function for fast encoding of an input fragment, independently from the input - history. This function uses one-pass processing: when we find a backward - match, we immediately emit the corresponding command and literal codes to - the bit stream. 
- - Adapted from the CompressFragment() function in - https://github.com/google/snappy/blob/master/snappy.cc */ - -const maxDistance_compress_fragment = 262128 - -func hash5(p []byte, shift uint) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(p) << 24) * uint64(kHashMul32) - return uint32(h >> shift) -} - -func hashBytesAtOffset5(v uint64, offset int, shift uint) uint32 { - assert(offset >= 0) - assert(offset <= 3) - { - var h uint64 = ((v >> uint(8*offset)) << 24) * uint64(kHashMul32) - return uint32(h >> shift) - } -} - -func isMatch5(p1 []byte, p2 []byte) bool { - var i int - for i = 0; i < 5; i++ { - if p1[i] != p2[i] { - return false - } - } - - return true -} - -/* Builds a literal prefix code into "depths" and "bits" based on the statistics - of the "input" string and stores it into the bit stream. - Note that the prefix code here is built from the pre-LZ77 input, therefore - we can only approximate the statistics of the actual literal stream. - Moreover, for long inputs we build a histogram from a sample of the input - and thus have to assign a non-zero depth for each literal. - Returns estimated compression ratio millibytes/char for encoding given input - with generated code. */ -func buildAndStoreLiteralPrefixCode(input []byte, input_size uint, depths []byte, bits []uint16, storage_ix *uint, storage []byte) uint { - var histogram = [256]uint32{0} - var histogram_total uint - var i uint - if input_size < 1<<15 { - for i = 0; i < input_size; i++ { - histogram[input[i]]++ - } - - histogram_total = input_size - for i = 0; i < 256; i++ { - /* We weigh the first 11 samples with weight 3 to account for the - balancing effect of the LZ77 phase on the histogram. */ - var adjust uint32 = 2 * brotli_min_uint32_t(histogram[i], 11) - histogram[i] += adjust - histogram_total += uint(adjust) - } - } else { - const kSampleRate uint = 29 - for i = 0; i < input_size; i += kSampleRate { - histogram[input[i]]++ - } - - histogram_total = (input_size + kSampleRate - 1) / kSampleRate - for i = 0; i < 256; i++ { - /* We add 1 to each population count to avoid 0 bit depths (since this is - only a sample and we don't know if the symbol appears or not), and we - weigh the first 11 samples with weight 3 to account for the balancing - effect of the LZ77 phase on the histogram (more frequent symbols are - more likely to be in backward references instead as literals). */ - var adjust uint32 = 1 + 2*brotli_min_uint32_t(histogram[i], 11) - histogram[i] += adjust - histogram_total += uint(adjust) - } - } - - buildAndStoreHuffmanTreeFast(histogram[:], histogram_total, /* max_bits = */ - 8, depths, bits, storage_ix, storage) - { - var literal_ratio uint = 0 - for i = 0; i < 256; i++ { - if histogram[i] != 0 { - literal_ratio += uint(histogram[i] * uint32(depths[i])) - } - } - - /* Estimated encoding ratio, millibytes per symbol. */ - return (literal_ratio * 125) / histogram_total - } -} - -/* Builds a command and distance prefix code (each 64 symbols) into "depth" and - "bits" based on "histogram" and stores it into the bit stream. */ -func buildAndStoreCommandPrefixCode1(histogram []uint32, depth []byte, bits []uint16, storage_ix *uint, storage []byte) { - var tree [129]huffmanTree - var cmd_depth = [numCommandSymbols]byte{0} - /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. 
*/ - - var cmd_bits [64]uint16 - - createHuffmanTree(histogram, 64, 15, tree[:], depth) - createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:]) - - /* We have to jump through a few hoops here in order to compute - the command bits because the symbols are in a different order than in - the full alphabet. This looks complicated, but having the symbols - in this order in the command bits saves a few branches in the Emit* - functions. */ - copy(cmd_depth[:], depth[:24]) - - copy(cmd_depth[24:][:], depth[40:][:8]) - copy(cmd_depth[32:][:], depth[24:][:8]) - copy(cmd_depth[40:][:], depth[48:][:8]) - copy(cmd_depth[48:][:], depth[32:][:8]) - copy(cmd_depth[56:][:], depth[56:][:8]) - convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:]) - copy(bits, cmd_bits[:24]) - copy(bits[24:], cmd_bits[32:][:8]) - copy(bits[32:], cmd_bits[48:][:8]) - copy(bits[40:], cmd_bits[24:][:8]) - copy(bits[48:], cmd_bits[40:][:8]) - copy(bits[56:], cmd_bits[56:][:8]) - convertBitDepthsToSymbols(depth[64:], 64, bits[64:]) - { - /* Create the bit length array for the full command alphabet. */ - var i uint - for i := 0; i < int(64); i++ { - cmd_depth[i] = 0 - } /* only 64 first values were used */ - copy(cmd_depth[:], depth[:8]) - copy(cmd_depth[64:][:], depth[8:][:8]) - copy(cmd_depth[128:][:], depth[16:][:8]) - copy(cmd_depth[192:][:], depth[24:][:8]) - copy(cmd_depth[384:][:], depth[32:][:8]) - for i = 0; i < 8; i++ { - cmd_depth[128+8*i] = depth[40+i] - cmd_depth[256+8*i] = depth[48+i] - cmd_depth[448+8*i] = depth[56+i] - } - - storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], storage_ix, storage) - } - - storeHuffmanTree(depth[64:], 64, tree[:], storage_ix, storage) -} - -/* REQUIRES: insertlen < 6210 */ -func emitInsertLen1(insertlen uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) { - if insertlen < 6 { - var code uint = insertlen + 40 - writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage) - histo[code]++ - } else if insertlen < 130 { - var tail uint = insertlen - 2 - var nbits uint32 = log2FloorNonZero(tail) - 1 - var prefix uint = tail >> nbits - var inscode uint = uint((nbits << 1) + uint32(prefix) + 42) - writeBits(uint(depth[inscode]), uint64(bits[inscode]), storage_ix, storage) - writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> nbits - var code uint = uint((nbits << 1) + uint32(prefix) + 20) - writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage) - writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> nbits - var code uint = uint((nbits << 1) + uint32(prefix) + 4) - writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage) - writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> 5) + 30 - writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage) - writeBits(5, uint64(tail)&31, storage_ix, storage) - writeBits(uint(depth[64]), uint64(bits[64]), storage_ix, storage) - histo[code]++ - histo[64]++ - } else if copylen < 2120 { - var tail uint = copylen - 72 - var nbits uint32 = log2FloorNonZero(tail) - var code uint = uint(nbits + 28) - writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage) - writeBits(uint(nbits), uint64(tail)-(uint64(uint(1))<> nbits) & 1 - var offset uint = (2 + prefix) << nbits - var distcode uint = uint(2*(nbits-1) + uint32(prefix) + 80) - writeBits(uint(depth[distcode]), uint64(bits[distcode]), storage_ix, storage) - writeBits(uint(nbits), uint64(d)-uint64(offset), storage_ix, storage) - histo[distcode]++ -} - -func emitLiterals(input []byte, len uint, depth 
[]byte, bits []uint16, storage_ix *uint, storage []byte) { - var j uint - for j = 0; j < len; j++ { - var lit byte = input[j] - writeBits(uint(depth[lit]), uint64(bits[lit]), storage_ix, storage) - } -} - -/* REQUIRES: len <= 1 << 24. */ -func storeMetaBlockHeader1(len uint, is_uncompressed bool, storage_ix *uint, storage []byte) { - var nibbles uint = 6 - - /* ISLAST */ - writeBits(1, 0, storage_ix, storage) - - if len <= 1<<16 { - nibbles = 4 - } else if len <= 1<<20 { - nibbles = 5 - } - - writeBits(2, uint64(nibbles)-4, storage_ix, storage) - writeBits(nibbles*4, uint64(len)-1, storage_ix, storage) - - /* ISUNCOMPRESSED */ - writeSingleBit(is_uncompressed, storage_ix, storage) -} - -func updateBits(n_bits uint, bits uint32, pos uint, array []byte) { - for n_bits > 0 { - var byte_pos uint = pos >> 3 - var n_unchanged_bits uint = pos & 7 - var n_changed_bits uint = brotli_min_size_t(n_bits, 8-n_unchanged_bits) - var total_bits uint = n_unchanged_bits + n_changed_bits - var mask uint32 = (^((1 << total_bits) - 1)) | ((1 << n_unchanged_bits) - 1) - var unchanged_bits uint32 = uint32(array[byte_pos]) & mask - var changed_bits uint32 = bits & ((1 << n_changed_bits) - 1) - array[byte_pos] = byte(changed_bits<>= n_changed_bits - pos += n_changed_bits - } -} - -func rewindBitPosition1(new_storage_ix uint, storage_ix *uint, storage []byte) { - var bitpos uint = new_storage_ix & 7 - var mask uint = (1 << bitpos) - 1 - storage[new_storage_ix>>3] &= byte(mask) - *storage_ix = new_storage_ix -} - -var shouldMergeBlock_kSampleRate uint = 43 - -func shouldMergeBlock(data []byte, len uint, depths []byte) bool { - var histo = [256]uint{0} - var i uint - for i = 0; i < len; i += shouldMergeBlock_kSampleRate { - histo[data[i]]++ - } - { - var total uint = (len + shouldMergeBlock_kSampleRate - 1) / shouldMergeBlock_kSampleRate - var r float64 = (fastLog2(total)+0.5)*float64(total) + 200 - for i = 0; i < 256; i++ { - r -= float64(histo[i]) * (float64(depths[i]) + fastLog2(histo[i])) - } - - return r >= 0.0 - } -} - -func shouldUseUncompressedMode(metablock_start []byte, next_emit []byte, insertlen uint, literal_ratio uint) bool { - var compressed uint = uint(-cap(next_emit) + cap(metablock_start)) - if compressed*50 > insertlen { - return false - } else { - return literal_ratio > 980 - } -} - -func emitUncompressedMetaBlock1(begin []byte, end []byte, storage_ix_start uint, storage_ix *uint, storage []byte) { - var len uint = uint(-cap(end) + cap(begin)) - rewindBitPosition1(storage_ix_start, storage_ix, storage) - storeMetaBlockHeader1(uint(len), true, storage_ix, storage) - *storage_ix = (*storage_ix + 7) &^ 7 - copy(storage[*storage_ix>>3:], begin[:len]) - *storage_ix += uint(len << 3) - storage[*storage_ix>>3] = 0 -} - -var kCmdHistoSeed = [128]uint32{ - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 0, - 0, - 0, - 0, -} - -var compressFragmentFastImpl_kFirstBlockSize uint = 3 << 15 -var compressFragmentFastImpl_kMergeBlockSize uint = 1 << 16 
- -func compressFragmentFastImpl(in []byte, input_size uint, is_last bool, table []int, table_bits uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, storage_ix *uint, storage []byte) { - var cmd_histo [128]uint32 - var ip_end int - var next_emit int = 0 - var base_ip int = 0 - var input int = 0 - const kInputMarginBytes uint = windowGap - const kMinMatchLen uint = 5 - var metablock_start int = input - var block_size uint = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize) - var total_block_size uint = block_size - var mlen_storage_ix uint = *storage_ix + 3 - var lit_depth [256]byte - var lit_bits [256]uint16 - var literal_ratio uint - var ip int - var last_distance int - var shift uint = 64 - table_bits - - /* "next_emit" is a pointer to the first byte that is not covered by a - previous copy. Bytes between "next_emit" and the start of the next copy or - the end of the input will be emitted as literal bytes. */ - - /* Save the start of the first block for position and distance computations. - */ - - /* Save the bit position of the MLEN field of the meta-block header, so that - we can update it later if we decide to extend this meta-block. */ - storeMetaBlockHeader1(block_size, false, storage_ix, storage) - - /* No block splits, no contexts. */ - writeBits(13, 0, storage_ix, storage) - - literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], storage_ix, storage) - { - /* Store the pre-compressed command and distance prefix codes. */ - var i uint - for i = 0; i+7 < *cmd_code_numbits; i += 8 { - writeBits(8, uint64(cmd_code[i>>3]), storage_ix, storage) - } - } - - writeBits(*cmd_code_numbits&7, uint64(cmd_code[*cmd_code_numbits>>3]), storage_ix, storage) - - /* Initialize the command and distance histograms. We will gather - statistics of command and distance codes during the processing - of this block and use it to update the command and distance - prefix codes for the next block. */ -emit_commands: - copy(cmd_histo[:], kCmdHistoSeed[:]) - - /* "ip" is the input pointer. */ - ip = input - - last_distance = -1 - ip_end = int(uint(input) + block_size) - - if block_size >= kInputMarginBytes { - var len_limit uint = brotli_min_size_t(block_size-kMinMatchLen, input_size-kInputMarginBytes) - var ip_limit int = int(uint(input) + len_limit) - /* For the last block, we need to keep a 16 bytes margin so that we can be - sure that all distances are at most window size - 16. - For all other blocks, we only need to keep a margin of 5 bytes so that - we don't go over the block size with a copy. */ - - var next_hash uint32 - ip++ - for next_hash = hash5(in[ip:], shift); ; { - var skip uint32 = 32 - var next_ip int = ip - /* Step 1: Scan forward in the input looking for a 5-byte-long match. - If we get close to exhausting the input then goto emit_remainder. - - Heuristic match skipping: If 32 bytes are scanned with no matches - found, start looking only at every other byte. If 32 more bytes are - scanned, look at every third byte, etc.. When a match is found, - immediately go back to looking at every byte. This is a small loss - (~5% performance, ~0.1% density) for compressible data due to more - bookkeeping, but for non-compressible data (such as JPEG) it's a huge - win since the compressor quickly "realizes" the data is incompressible - and doesn't bother looking for matches everywhere. - - The "skip" variable keeps track of how many bytes there are since the - last match; dividing it by 32 (i.e. 
right-shifting by five) gives the - number of bytes to move ahead for each iteration. */ - - var candidate int - assert(next_emit < ip) - - trawl: - for { - var hash uint32 = next_hash - var bytes_between_hash_lookups uint32 = skip >> 5 - skip++ - assert(hash == hash5(in[next_ip:], shift)) - ip = next_ip - next_ip = int(uint32(ip) + bytes_between_hash_lookups) - if next_ip > ip_limit { - goto emit_remainder - } - - next_hash = hash5(in[next_ip:], shift) - candidate = ip - last_distance - if isMatch5(in[ip:], in[candidate:]) { - if candidate < ip { - table[hash] = int(ip - base_ip) - break - } - } - - candidate = base_ip + table[hash] - assert(candidate >= base_ip) - assert(candidate < ip) - - table[hash] = int(ip - base_ip) - if !(!isMatch5(in[ip:], in[candidate:])) { - break - } - } - - /* Check copy distance. If candidate is not feasible, continue search. - Checking is done outside of hot loop to reduce overhead. */ - if ip-candidate > maxDistance_compress_fragment { - goto trawl - } - - /* Step 2: Emit the found match together with the literal bytes from - "next_emit" to the bit stream, and then see if we can find a next match - immediately afterwards. Repeat until we find no match for the input - without emitting some literal bytes. */ - { - var base int = ip - /* > 0 */ - var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5) - var distance int = int(base - candidate) - /* We have a 5-byte match at ip, and we need to emit bytes in - [next_emit, ip). */ - - var insert uint = uint(base - next_emit) - ip += int(matched) - if insert < 6210 { - emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) { - emitUncompressedMetaBlock1(in[metablock_start:], in[base:], mlen_storage_ix-3, storage_ix, storage) - input_size -= uint(base - input) - input = base - next_emit = input - goto next_block - } else { - emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - } - - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage) - if distance == last_distance { - writeBits(uint(cmd_depth[64]), uint64(cmd_bits[64]), storage_ix, storage) - cmd_histo[64]++ - } else { - emitDistance1(uint(distance), cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - last_distance = distance - } - - emitCopyLenLastDistance1(matched, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some positions - within the last copy. */ - { - var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:]) - var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift) - var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset5(input_bytes, 1, shift) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset5(input_bytes, 2, shift) - table[prev_hash] = int(ip - base_ip - 1) - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - for isMatch5(in[ip:], in[candidate:]) { - var base int = ip - /* We have a 5-byte match at ip, and no need to emit any literal bytes - prior to ip. 
*/ - - var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5) - if ip-candidate > maxDistance_compress_fragment { - break - } - ip += int(matched) - last_distance = int(base - candidate) /* > 0 */ - emitCopyLen1(matched, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - emitDistance1(uint(last_distance), cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some positions - within the last copy. */ - { - var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:]) - var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift) - var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset5(input_bytes, 1, shift) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset5(input_bytes, 2, shift) - table[prev_hash] = int(ip - base_ip - 1) - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - ip++ - next_hash = hash5(in[ip:], shift) - } - } - -emit_remainder: - assert(next_emit <= ip_end) - input += int(block_size) - input_size -= block_size - block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kMergeBlockSize) - - /* Decide if we want to continue this meta-block instead of emitting the - last insert-only command. */ - if input_size > 0 && total_block_size+block_size <= 1<<20 && shouldMergeBlock(in[input:], block_size, lit_depth[:]) { - assert(total_block_size > 1<<16) - - /* Update the size of the current meta-block and continue emitting commands. - We can do this because the current size and the new size both have 5 - nibbles. */ - total_block_size += block_size - - updateBits(20, uint32(total_block_size-1), mlen_storage_ix, storage) - goto emit_commands - } - - /* Emit the remaining bytes as literals. */ - if next_emit < ip_end { - var insert uint = uint(ip_end - next_emit) - if insert < 6210 { - emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage) - } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) { - emitUncompressedMetaBlock1(in[metablock_start:], in[ip_end:], mlen_storage_ix-3, storage_ix, storage) - } else { - emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage) - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage) - } - } - - next_emit = ip_end - - /* If we have more data, write a new meta-block header and prefix codes and - then continue emitting commands. */ -next_block: - if input_size > 0 { - metablock_start = input - block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize) - total_block_size = block_size - - /* Save the bit position of the MLEN field of the meta-block header, so that - we can update it later if we decide to extend this meta-block. */ - mlen_storage_ix = *storage_ix + 3 - - storeMetaBlockHeader1(block_size, false, storage_ix, storage) - - /* No block splits, no contexts. 
*/ - writeBits(13, 0, storage_ix, storage) - - literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], storage_ix, storage) - buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, storage_ix, storage) - goto emit_commands - } - - if !is_last { - /* If this is not the last block, update the command and distance prefix - codes for the next block and store the compressed forms. */ - cmd_code[0] = 0 - - *cmd_code_numbits = 0 - buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, cmd_code_numbits, cmd_code) - } -} - -/* Compresses "input" string to the "*storage" buffer as one or more complete - meta-blocks, and updates the "*storage_ix" bit position. - - If "is_last" is 1, emits an additional empty last meta-block. - - "cmd_depth" and "cmd_bits" contain the command and distance prefix codes - (see comment in encode.h) used for the encoding of this input fragment. - If "is_last" is 0, they are updated to reflect the statistics - of this input fragment, to be used for the encoding of the next fragment. - - "*cmd_code_numbits" is the number of bits of the compressed representation - of the command and distance prefix codes, and "cmd_code" is an array of - at least "(*cmd_code_numbits + 7) >> 3" size that contains the compressed - command and distance prefix codes. If "is_last" is 0, these are also - updated to represent the updated "cmd_depth" and "cmd_bits". - - REQUIRES: "input_size" is greater than zero, or "is_last" is 1. - REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24). - REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero. - REQUIRES: "table_size" is an odd (9, 11, 13, 15) power of two - OUTPUT: maximal copy distance <= |input_size| - OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */ -func compressFragmentFast(input []byte, input_size uint, is_last bool, table []int, table_size uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, storage_ix *uint, storage []byte) { - var initial_storage_ix uint = *storage_ix - var table_bits uint = uint(log2FloorNonZero(table_size)) - - if input_size == 0 { - assert(is_last) - writeBits(1, 1, storage_ix, storage) /* islast */ - writeBits(1, 1, storage_ix, storage) /* isempty */ - *storage_ix = (*storage_ix + 7) &^ 7 - return - } - - compressFragmentFastImpl(input, input_size, is_last, table, table_bits, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, storage_ix, storage) - - /* If output is larger than single uncompressed block, rewrite it. */ - if *storage_ix-initial_storage_ix > 31+(input_size<<3) { - emitUncompressedMetaBlock1(input, input[input_size:], initial_storage_ix, storage_ix, storage) - } - - if is_last { - writeBits(1, 1, storage_ix, storage) /* islast */ - writeBits(1, 1, storage_ix, storage) /* isempty */ - *storage_ix = (*storage_ix + 7) &^ 7 - } -} diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go b/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go deleted file mode 100644 index ffeb321644b..00000000000 --- a/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go +++ /dev/null @@ -1,749 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function for fast encoding of an input fragment, independently from the input - history. 
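The match-skipping back-off described in the comments of the deleted one-pass compressFragmentFastImpl above (skip starts at 32, the probe stride is skip >> 5) is easy to make concrete: the scanner probes every byte for the first 32 misses, every other byte for the next 32, every third byte after that, and so on. A small standalone sketch of the schedule (illustrative only):

// Sketch: reproduces the "skip >> 5" probe schedule from the deleted
// one-pass compressor to show how the scan stride widens on incompressible data.
package main

import "fmt"

func main() {
	skip := uint32(32) // the deleted code resets skip to 32 whenever scanning resumes
	pos := 0           // bytes scanned since the last emitted match
	for probe := 1; probe <= 128; probe++ {
		step := int(skip >> 5) // bytes_between_hash_lookups in the deleted code
		skip++
		pos += step
		if probe%32 == 0 {
			fmt.Printf("after %3d failed probes: stride=%d, scanned %d bytes\n",
				probe, step, pos)
		}
	}
}

Because skip is re-initialised to 32 every time scanning resumes after an emitted match, compressible input keeps byte-granular probing, while incompressible input (the JPEG case in the comment) quickly widens its stride, which is where the claimed speedup comes from.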
This function uses two-pass processing: in the first pass we save - the found backward matches and literal bytes into a buffer, and in the - second pass we emit them into the bit stream using prefix codes built based - on the actual command and literal byte histograms. */ - -const kCompressFragmentTwoPassBlockSize uint = 1 << 17 - -func hash1(p []byte, shift uint, length uint) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(p) << ((8 - length) * 8)) * uint64(kHashMul32) - return uint32(h >> shift) -} - -func hashBytesAtOffset(v uint64, offset uint, shift uint, length uint) uint32 { - assert(offset <= 8-length) - { - var h uint64 = ((v >> (8 * offset)) << ((8 - length) * 8)) * uint64(kHashMul32) - return uint32(h >> shift) - } -} - -func isMatch1(p1 []byte, p2 []byte, length uint) bool { - var i uint - for i = 0; i < length && i < 6; i++ { - if p1[i] != p2[i] { - return false - } - } - - return true -} - -/* Builds a command and distance prefix code (each 64 symbols) into "depth" and - "bits" based on "histogram" and stores it into the bit stream. */ -func buildAndStoreCommandPrefixCode(histogram []uint32, depth []byte, bits []uint16, storage_ix *uint, storage []byte) { - var tree [129]huffmanTree - var cmd_depth = [numCommandSymbols]byte{0} - /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */ - - var cmd_bits [64]uint16 - createHuffmanTree(histogram, 64, 15, tree[:], depth) - createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:]) - - /* We have to jump through a few hoops here in order to compute - the command bits because the symbols are in a different order than in - the full alphabet. This looks complicated, but having the symbols - in this order in the command bits saves a few branches in the Emit* - functions. */ - copy(cmd_depth[:], depth[24:][:24]) - - copy(cmd_depth[24:][:], depth[:8]) - copy(cmd_depth[32:][:], depth[48:][:8]) - copy(cmd_depth[40:][:], depth[8:][:8]) - copy(cmd_depth[48:][:], depth[56:][:8]) - copy(cmd_depth[56:][:], depth[16:][:8]) - convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:]) - copy(bits, cmd_bits[24:][:8]) - copy(bits[8:], cmd_bits[40:][:8]) - copy(bits[16:], cmd_bits[56:][:8]) - copy(bits[24:], cmd_bits[:24]) - copy(bits[48:], cmd_bits[32:][:8]) - copy(bits[56:], cmd_bits[48:][:8]) - convertBitDepthsToSymbols(depth[64:], 64, bits[64:]) - { - /* Create the bit length array for the full command alphabet. 
*/ - var i uint - for i := 0; i < int(64); i++ { - cmd_depth[i] = 0 - } /* only 64 first values were used */ - copy(cmd_depth[:], depth[24:][:8]) - copy(cmd_depth[64:][:], depth[32:][:8]) - copy(cmd_depth[128:][:], depth[40:][:8]) - copy(cmd_depth[192:][:], depth[48:][:8]) - copy(cmd_depth[384:][:], depth[56:][:8]) - for i = 0; i < 8; i++ { - cmd_depth[128+8*i] = depth[i] - cmd_depth[256+8*i] = depth[8+i] - cmd_depth[448+8*i] = depth[16+i] - } - - storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], storage_ix, storage) - } - - storeHuffmanTree(depth[64:], 64, tree[:], storage_ix, storage) -} - -func emitInsertLen(insertlen uint32, commands *[]uint32) { - if insertlen < 6 { - (*commands)[0] = insertlen - } else if insertlen < 130 { - var tail uint32 = insertlen - 2 - var nbits uint32 = log2FloorNonZero(uint(tail)) - 1 - var prefix uint32 = tail >> nbits - var inscode uint32 = (nbits << 1) + prefix + 2 - var extra uint32 = tail - (prefix << nbits) - (*commands)[0] = inscode | extra<<8 - } else if insertlen < 2114 { - var tail uint32 = insertlen - 66 - var nbits uint32 = log2FloorNonZero(uint(tail)) - var code uint32 = nbits + 10 - var extra uint32 = tail - (1 << nbits) - (*commands)[0] = code | extra<<8 - } else if insertlen < 6210 { - var extra uint32 = insertlen - 2114 - (*commands)[0] = 21 | extra<<8 - } else if insertlen < 22594 { - var extra uint32 = insertlen - 6210 - (*commands)[0] = 22 | extra<<8 - } else { - var extra uint32 = insertlen - 22594 - (*commands)[0] = 23 | extra<<8 - } - - *commands = (*commands)[1:] -} - -func emitCopyLen(copylen uint, commands *[]uint32) { - if copylen < 10 { - (*commands)[0] = uint32(copylen + 38) - } else if copylen < 134 { - var tail uint = copylen - 6 - var nbits uint = uint(log2FloorNonZero(tail) - 1) - var prefix uint = tail >> nbits - var code uint = (nbits << 1) + prefix + 44 - var extra uint = tail - (prefix << nbits) - (*commands)[0] = uint32(code | extra<<8) - } else if copylen < 2118 { - var tail uint = copylen - 70 - var nbits uint = uint(log2FloorNonZero(tail)) - var code uint = nbits + 52 - var extra uint = tail - (uint(1) << nbits) - (*commands)[0] = uint32(code | extra<<8) - } else { - var extra uint = copylen - 2118 - (*commands)[0] = uint32(63 | extra<<8) - } - - *commands = (*commands)[1:] -} - -func emitCopyLenLastDistance(copylen uint, commands *[]uint32) { - if copylen < 12 { - (*commands)[0] = uint32(copylen + 20) - *commands = (*commands)[1:] - } else if copylen < 72 { - var tail uint = copylen - 8 - var nbits uint = uint(log2FloorNonZero(tail) - 1) - var prefix uint = tail >> nbits - var code uint = (nbits << 1) + prefix + 28 - var extra uint = tail - (prefix << nbits) - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - } else if copylen < 136 { - var tail uint = copylen - 8 - var code uint = (tail >> 5) + 54 - var extra uint = tail & 31 - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else if copylen < 2120 { - var tail uint = copylen - 72 - var nbits uint = uint(log2FloorNonZero(tail)) - var code uint = nbits + 52 - var extra uint = tail - (uint(1) << nbits) - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else { - var extra uint = copylen - 2120 - (*commands)[0] = uint32(63 | extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } -} - -func emitDistance(distance uint32, commands *[]uint32) { - var 
d uint32 = distance + 3 - var nbits uint32 = log2FloorNonZero(uint(d)) - 1 - var prefix uint32 = (d >> nbits) & 1 - var offset uint32 = (2 + prefix) << nbits - var distcode uint32 = 2*(nbits-1) + prefix + 80 - var extra uint32 = d - offset - (*commands)[0] = distcode | extra<<8 - *commands = (*commands)[1:] -} - -/* REQUIRES: len <= 1 << 24. */ -func storeMetaBlockHeader(len uint, is_uncompressed bool, storage_ix *uint, storage []byte) { - var nibbles uint = 6 - - /* ISLAST */ - writeBits(1, 0, storage_ix, storage) - - if len <= 1<<16 { - nibbles = 4 - } else if len <= 1<<20 { - nibbles = 5 - } - - writeBits(2, uint64(nibbles)-4, storage_ix, storage) - writeBits(nibbles*4, uint64(len)-1, storage_ix, storage) - - /* ISUNCOMPRESSED */ - writeSingleBit(is_uncompressed, storage_ix, storage) -} - -func createCommands(input []byte, block_size uint, input_size uint, base_ip_ptr []byte, table []int, table_bits uint, min_match uint, literals *[]byte, commands *[]uint32) { - var ip int = 0 - var shift uint = 64 - table_bits - var ip_end int = int(block_size) - var base_ip int = -cap(base_ip_ptr) + cap(input) - var next_emit int = 0 - var last_distance int = -1 - /* "ip" is the input pointer. */ - - const kInputMarginBytes uint = windowGap - - /* "next_emit" is a pointer to the first byte that is not covered by a - previous copy. Bytes between "next_emit" and the start of the next copy or - the end of the input will be emitted as literal bytes. */ - if block_size >= kInputMarginBytes { - var len_limit uint = brotli_min_size_t(block_size-min_match, input_size-kInputMarginBytes) - var ip_limit int = int(len_limit) - /* For the last block, we need to keep a 16 bytes margin so that we can be - sure that all distances are at most window size - 16. - For all other blocks, we only need to keep a margin of 5 bytes so that - we don't go over the block size with a copy. */ - - var next_hash uint32 - ip++ - for next_hash = hash1(input[ip:], shift, min_match); ; { - var skip uint32 = 32 - var next_ip int = ip - /* Step 1: Scan forward in the input looking for a 6-byte-long match. - If we get close to exhausting the input then goto emit_remainder. - - Heuristic match skipping: If 32 bytes are scanned with no matches - found, start looking only at every other byte. If 32 more bytes are - scanned, look at every third byte, etc.. When a match is found, - immediately go back to looking at every byte. This is a small loss - (~5% performance, ~0.1% density) for compressible data due to more - bookkeeping, but for non-compressible data (such as JPEG) it's a huge - win since the compressor quickly "realizes" the data is incompressible - and doesn't bother looking for matches everywhere. - - The "skip" variable keeps track of how many bytes there are since the - last match; dividing it by 32 (ie. right-shifting by five) gives the - number of bytes to move ahead for each iteration. 
*/ - - var candidate int - - assert(next_emit < ip) - - trawl: - for { - var hash uint32 = next_hash - var bytes_between_hash_lookups uint32 = skip >> 5 - skip++ - ip = next_ip - assert(hash == hash1(input[ip:], shift, min_match)) - next_ip = int(uint32(ip) + bytes_between_hash_lookups) - if next_ip > ip_limit { - goto emit_remainder - } - - next_hash = hash1(input[next_ip:], shift, min_match) - candidate = ip - last_distance - if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - if candidate < ip { - table[hash] = int(ip - base_ip) - break - } - } - - candidate = base_ip + table[hash] - assert(candidate >= base_ip) - assert(candidate < ip) - - table[hash] = int(ip - base_ip) - if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - break - } - } - - /* Check copy distance. If candidate is not feasible, continue search. - Checking is done outside of hot loop to reduce overhead. */ - if ip-candidate > maxDistance_compress_fragment { - goto trawl - } - - /* Step 2: Emit the found match together with the literal bytes from - "next_emit", and then see if we can find a next match immediately - afterwards. Repeat until we find no match for the input - without emitting some literal bytes. */ - { - var base int = ip - /* > 0 */ - var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match) - var distance int = int(base - candidate) - /* We have a 6-byte match at ip, and we need to emit bytes in - [next_emit, ip). */ - - var insert int = int(base - next_emit) - ip += int(matched) - emitInsertLen(uint32(insert), commands) - copy(*literals, input[next_emit:][:uint(insert)]) - *literals = (*literals)[insert:] - if distance == last_distance { - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else { - emitDistance(uint32(distance), commands) - last_distance = distance - } - - emitCopyLenLastDistance(matched, commands) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - { - var input_bytes uint64 - var cur_hash uint32 - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some - positions within the last copy. 
*/ - - var prev_hash uint32 - if min_match == 4 { - input_bytes = binary.LittleEndian.Uint64(input[ip-3:]) - cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } else { - input_bytes = binary.LittleEndian.Uint64(input[ip-5:]) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 5) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 4) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - input_bytes = binary.LittleEndian.Uint64(input[ip-2:]) - cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - for ip-candidate <= maxDistance_compress_fragment && isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - var base int = ip - /* We have a 6-byte match at ip, and no need to emit any - literal bytes prior to ip. */ - - var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match) - ip += int(matched) - last_distance = int(base - candidate) /* > 0 */ - emitCopyLen(matched, commands) - emitDistance(uint32(last_distance), commands) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - { - var input_bytes uint64 - var cur_hash uint32 - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some - positions within the last copy. 
*/ - - var prev_hash uint32 - if min_match == 4 { - input_bytes = binary.LittleEndian.Uint64(input[ip-3:]) - cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } else { - input_bytes = binary.LittleEndian.Uint64(input[ip-5:]) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 5) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 4) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - input_bytes = binary.LittleEndian.Uint64(input[ip-2:]) - cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - ip++ - next_hash = hash1(input[ip:], shift, min_match) - } - } - -emit_remainder: - assert(next_emit <= ip_end) - - /* Emit the remaining bytes as literals. */ - if next_emit < ip_end { - var insert uint32 = uint32(ip_end - next_emit) - emitInsertLen(insert, commands) - copy(*literals, input[next_emit:][:insert]) - *literals = (*literals)[insert:] - } -} - -var storeCommands_kNumExtraBits = [128]uint32{ - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 12, - 14, - 24, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 24, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 6, - 7, - 7, - 8, - 8, - 9, - 9, - 10, - 10, - 11, - 11, - 12, - 12, - 13, - 13, - 14, - 14, - 15, - 15, - 16, - 16, - 17, - 17, - 18, - 18, - 19, - 19, - 20, - 20, - 21, - 21, - 22, - 22, - 23, - 23, - 24, - 24, -} -var storeCommands_kInsertOffset = [24]uint32{ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 8, - 10, - 14, - 18, - 26, - 34, - 50, - 66, - 98, - 130, - 194, - 322, - 578, - 1090, - 2114, - 6210, - 22594, -} - -func storeCommands(literals []byte, num_literals uint, commands []uint32, num_commands uint, storage_ix *uint, storage []byte) { - var lit_depths [256]byte - var lit_bits [256]uint16 - var lit_histo = [256]uint32{0} - var cmd_depths = [128]byte{0} - var cmd_bits = [128]uint16{0} - var cmd_histo = [128]uint32{0} - var i uint - for i = 0; i < num_literals; i++ { - lit_histo[literals[i]]++ - } - - buildAndStoreHuffmanTreeFast(lit_histo[:], num_literals, /* max_bits = */ - 8, lit_depths[:], lit_bits[:], storage_ix, storage) - - for i = 0; i < num_commands; i++ { - var code uint32 = commands[i] & 0xFF - assert(code < 128) - cmd_histo[code]++ - } - - cmd_histo[1] += 1 - cmd_histo[2] += 1 - cmd_histo[64] += 1 - cmd_histo[84] += 1 - buildAndStoreCommandPrefixCode(cmd_histo[:], cmd_depths[:], cmd_bits[:], storage_ix, storage) - - for i = 0; i < num_commands; i++ { - var cmd uint32 = commands[i] - var code uint32 = cmd & 0xFF - var 
extra uint32 = cmd >> 8 - assert(code < 128) - writeBits(uint(cmd_depths[code]), uint64(cmd_bits[code]), storage_ix, storage) - writeBits(uint(storeCommands_kNumExtraBits[code]), uint64(extra), storage_ix, storage) - if code < 24 { - var insert uint32 = storeCommands_kInsertOffset[code] + extra - var j uint32 - for j = 0; j < insert; j++ { - var lit byte = literals[0] - writeBits(uint(lit_depths[lit]), uint64(lit_bits[lit]), storage_ix, storage) - literals = literals[1:] - } - } - } -} - -/* Acceptable loss for uncompressible speedup is 2% */ -const minRatio = 0.98 - -const sampleRate = 43 - -func shouldCompress(input []byte, input_size uint, num_literals uint) bool { - var corpus_size float64 = float64(input_size) - if float64(num_literals) < minRatio*corpus_size { - return true - } else { - var literal_histo = [256]uint32{0} - var max_total_bit_cost float64 = corpus_size * 8 * minRatio / sampleRate - var i uint - for i = 0; i < input_size; i += sampleRate { - literal_histo[input[i]]++ - } - - return bitsEntropy(literal_histo[:], 256) < max_total_bit_cost - } -} - -func rewindBitPosition(new_storage_ix uint, storage_ix *uint, storage []byte) { - var bitpos uint = new_storage_ix & 7 - var mask uint = (1 << bitpos) - 1 - storage[new_storage_ix>>3] &= byte(mask) - *storage_ix = new_storage_ix -} - -func emitUncompressedMetaBlock(input []byte, input_size uint, storage_ix *uint, storage []byte) { - storeMetaBlockHeader(input_size, true, storage_ix, storage) - *storage_ix = (*storage_ix + 7) &^ 7 - copy(storage[*storage_ix>>3:], input[:input_size]) - *storage_ix += input_size << 3 - storage[*storage_ix>>3] = 0 -} - -func compressFragmentTwoPassImpl(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_bits uint, min_match uint, storage_ix *uint, storage []byte) { - /* Save the start of the first block for position and distance computations. - */ - var base_ip []byte = input - - for input_size > 0 { - var block_size uint = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize) - var commands []uint32 = command_buf - var literals []byte = literal_buf - var num_literals uint - createCommands(input, block_size, input_size, base_ip, table, table_bits, min_match, &literals, &commands) - num_literals = uint(-cap(literals) + cap(literal_buf)) - if shouldCompress(input, block_size, num_literals) { - var num_commands uint = uint(-cap(commands) + cap(command_buf)) - storeMetaBlockHeader(block_size, false, storage_ix, storage) - - /* No block splits, no contexts. */ - writeBits(13, 0, storage_ix, storage) - - storeCommands(literal_buf, num_literals, command_buf, num_commands, storage_ix, storage) - } else { - /* Since we did not find many backward references and the entropy of - the data is close to 8 bits, we can simply emit an uncompressed block. - This makes compression speed of uncompressible data about 3x faster. */ - emitUncompressedMetaBlock(input, block_size, storage_ix, storage) - } - - input = input[block_size:] - input_size -= block_size - } -} - -/* Compresses "input" string to the "*storage" buffer as one or more complete - meta-blocks, and updates the "*storage_ix" bit position. - - If "is_last" is 1, emits an additional empty last meta-block. - - REQUIRES: "input_size" is greater than zero, or "is_last" is 1. - REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24). - REQUIRES: "command_buf" and "literal_buf" point to at least - kCompressFragmentTwoPassBlockSize long arrays. 
- REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero. - REQUIRES: "table_size" is a power of two - OUTPUT: maximal copy distance <= |input_size| - OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */ -func compressFragmentTwoPass(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_size uint, storage_ix *uint, storage []byte) { - var initial_storage_ix uint = *storage_ix - var table_bits uint = uint(log2FloorNonZero(table_size)) - var min_match uint - if table_bits <= 15 { - min_match = 4 - } else { - min_match = 6 - } - compressFragmentTwoPassImpl(input, input_size, is_last, command_buf, literal_buf, table, table_bits, min_match, storage_ix, storage) - - /* If output is larger than single uncompressed block, rewrite it. */ - if *storage_ix-initial_storage_ix > 31+(input_size<<3) { - rewindBitPosition(initial_storage_ix, storage_ix, storage) - emitUncompressedMetaBlock(input, input_size, storage_ix, storage) - } - - if is_last { - writeBits(1, 1, storage_ix, storage) /* islast */ - writeBits(1, 1, storage_ix, storage) /* isempty */ - *storage_ix = (*storage_ix + 7) &^ 7 - } -} diff --git a/vendor/github.com/andybalholm/brotli/constants.go b/vendor/github.com/andybalholm/brotli/constants.go deleted file mode 100644 index a880dff789d..00000000000 --- a/vendor/github.com/andybalholm/brotli/constants.go +++ /dev/null @@ -1,77 +0,0 @@ -package brotli - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Specification: 7.3. Encoding of the context map */ -const contextMapMaxRle = 16 - -/* Specification: 2. Compressed representation overview */ -const maxNumberOfBlockTypes = 256 - -/* Specification: 3.3. Alphabet sizes: insert-and-copy length */ -const numLiteralSymbols = 256 - -const numCommandSymbols = 704 - -const numBlockLenSymbols = 26 - -const maxContextMapSymbols = (maxNumberOfBlockTypes + contextMapMaxRle) - -const maxBlockTypeSymbols = (maxNumberOfBlockTypes + 2) - -/* Specification: 3.5. Complex prefix codes */ -const repeatPreviousCodeLength = 16 - -const repeatZeroCodeLength = 17 - -const codeLengthCodes = (repeatZeroCodeLength + 1) - -/* "code length of 8 is repeated" */ -const initialRepeatedCodeLength = 8 - -/* "Large Window Brotli" */ -const largeMaxDistanceBits = 62 - -const largeMinWbits = 10 - -const largeMaxWbits = 30 - -/* Specification: 4. Encoding of distances */ -const numDistanceShortCodes = 16 - -const maxNpostfix = 3 - -const maxNdirect = 120 - -const maxDistanceBits = 24 - -func distanceAlphabetSize(NPOSTFIX uint, NDIRECT uint, MAXNBITS uint) uint { - return numDistanceShortCodes + NDIRECT + uint(MAXNBITS<<(NPOSTFIX+1)) -} - -/* numDistanceSymbols == 1128 */ -const numDistanceSymbols = 1128 - -const maxDistance = 0x3FFFFFC - -const maxAllowedDistance = 0x7FFFFFFC - -/* 7.1. Context modes and context ID lookup for literals */ -/* "context IDs for literals are in the range of 0..63" */ -const literalContextBits = 6 - -/* 7.2. Context ID for distances */ -const distanceContextBits = 2 - -/* 9.1. Format of the Stream Header */ -/* Number of slack bytes for window size. Don't confuse - with BROTLI_NUM_DISTANCE_SHORT_CODES. 
*/ -const windowGap = 16 - -func maxBackwardLimit(W uint) uint { - return (uint(1) << W) - windowGap -} diff --git a/vendor/github.com/andybalholm/brotli/context.go b/vendor/github.com/andybalholm/brotli/context.go deleted file mode 100644 index 884ff8a2d69..00000000000 --- a/vendor/github.com/andybalholm/brotli/context.go +++ /dev/null @@ -1,2176 +0,0 @@ -package brotli - -/* Lookup table to map the previous two bytes to a context id. - -There are four different context modeling modes defined here: - contextLSB6: context id is the least significant 6 bits of the last byte, - contextMSB6: context id is the most significant 6 bits of the last byte, - contextUTF8: second-order context model tuned for UTF8-encoded text, - contextSigned: second-order context model tuned for signed integers. - -If |p1| and |p2| are the previous two bytes, and |mode| is current context -mode, we calculate the context as: - - context = ContextLut(mode)[p1] | ContextLut(mode)[p2 + 256]. - -For contextUTF8 mode, if the previous two bytes are ASCII characters -(i.e. < 128), this will be equivalent to - - context = 4 * context1(p1) + context2(p2), - -where context1 is based on the previous byte in the following way: - - 0 : non-ASCII control - 1 : \t, \n, \r - 2 : space - 3 : other punctuation - 4 : " ' - 5 : % - 6 : ( < [ { - 7 : ) > ] } - 8 : , ; : - 9 : . - 10 : = - 11 : number - 12 : upper-case vowel - 13 : upper-case consonant - 14 : lower-case vowel - 15 : lower-case consonant - -and context2 is based on the second last byte: - - 0 : control, space - 1 : punctuation - 2 : upper-case letter, number - 3 : lower-case letter - -If the last byte is ASCII, and the second last byte is not (in a valid UTF8 -stream it will be a continuation byte, value between 128 and 191), the -context is the same as if the second last byte was an ASCII control or space. - -If the last byte is a UTF8 lead byte (value >= 192), then the next byte will -be a continuation byte and the context id is 2 or 3 depending on the LSB of -the last byte and to a lesser extent on the second last byte if it is ASCII. - -If the last byte is a UTF8 continuation byte, the second last byte can be: - - continuation byte: the next byte is probably ASCII or lead byte (assuming - 4-byte UTF8 characters are rare) and the context id is 0 or 1. - - lead byte (192 - 207): next byte is ASCII or lead byte, context is 0 or 1 - - lead byte (208 - 255): next byte is continuation byte, context is 2 or 3 - -The possible value combinations of the previous two bytes, the range of -context ids and the type of the next byte is summarized in the table below: - -|--------\-----------------------------------------------------------------| -| \ Last byte | -| Second \---------------------------------------------------------------| -| last byte \ ASCII | cont. byte | lead byte | -| \ (0-127) | (128-191) | (192-) | -|=============|===================|=====================|==================| -| ASCII | next: ASCII/lead | not valid | next: cont. | -| (0-127) | context: 4 - 63 | | context: 2 - 3 | -|-------------|-------------------|---------------------|------------------| -| cont. byte | next: ASCII/lead | next: ASCII/lead | next: cont. 
| -| (128-191) | context: 4 - 63 | context: 0 - 1 | context: 2 - 3 | -|-------------|-------------------|---------------------|------------------| -| lead byte | not valid | next: ASCII/lead | not valid | -| (192-207) | | context: 0 - 1 | | -|-------------|-------------------|---------------------|------------------| -| lead byte | not valid | next: cont. | not valid | -| (208-) | | context: 2 - 3 | | -|-------------|-------------------|---------------------|------------------| -*/ - -const ( - contextLSB6 = 0 - contextMSB6 = 1 - contextUTF8 = 2 - contextSigned = 3 -) - -/* Common context lookup table for all context modes. */ -var kContextLookup = [2048]byte{ - /* CONTEXT_LSB6, last byte. */ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - - /* CONTEXT_LSB6, second last byte, */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* CONTEXT_MSB6, last byte. 
*/ - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 3, - 3, - 3, - 3, - 4, - 4, - 4, - 4, - 5, - 5, - 5, - 5, - 6, - 6, - 6, - 6, - 7, - 7, - 7, - 7, - 8, - 8, - 8, - 8, - 9, - 9, - 9, - 9, - 10, - 10, - 10, - 10, - 11, - 11, - 11, - 11, - 12, - 12, - 12, - 12, - 13, - 13, - 13, - 13, - 14, - 14, - 14, - 14, - 15, - 15, - 15, - 15, - 16, - 16, - 16, - 16, - 17, - 17, - 17, - 17, - 18, - 18, - 18, - 18, - 19, - 19, - 19, - 19, - 20, - 20, - 20, - 20, - 21, - 21, - 21, - 21, - 22, - 22, - 22, - 22, - 23, - 23, - 23, - 23, - 24, - 24, - 24, - 24, - 25, - 25, - 25, - 25, - 26, - 26, - 26, - 26, - 27, - 27, - 27, - 27, - 28, - 28, - 28, - 28, - 29, - 29, - 29, - 29, - 30, - 30, - 30, - 30, - 31, - 31, - 31, - 31, - 32, - 32, - 32, - 32, - 33, - 33, - 33, - 33, - 34, - 34, - 34, - 34, - 35, - 35, - 35, - 35, - 36, - 36, - 36, - 36, - 37, - 37, - 37, - 37, - 38, - 38, - 38, - 38, - 39, - 39, - 39, - 39, - 40, - 40, - 40, - 40, - 41, - 41, - 41, - 41, - 42, - 42, - 42, - 42, - 43, - 43, - 43, - 43, - 44, - 44, - 44, - 44, - 45, - 45, - 45, - 45, - 46, - 46, - 46, - 46, - 47, - 47, - 47, - 47, - 48, - 48, - 48, - 48, - 49, - 49, - 49, - 49, - 50, - 50, - 50, - 50, - 51, - 51, - 51, - 51, - 52, - 52, - 52, - 52, - 53, - 53, - 53, - 53, - 54, - 54, - 54, - 54, - 55, - 55, - 55, - 55, - 56, - 56, - 56, - 56, - 57, - 57, - 57, - 57, - 58, - 58, - 58, - 58, - 59, - 59, - 59, - 59, - 60, - 60, - 60, - 60, - 61, - 61, - 61, - 61, - 62, - 62, - 62, - 62, - 63, - 63, - 63, - 63, - - /* CONTEXT_MSB6, second last byte, */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* CONTEXT_UTF8, last byte. */ - /* ASCII range. 
*/ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4, - 4, - 0, - 0, - 4, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8, - 12, - 16, - 12, - 12, - 20, - 12, - 16, - 24, - 28, - 12, - 12, - 32, - 12, - 36, - 12, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 32, - 32, - 24, - 40, - 28, - 12, - 12, - 48, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 24, - 12, - 28, - 12, - 12, - 12, - 56, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 24, - 12, - 28, - 12, - 0, - - /* UTF8 continuation byte range. */ - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - - /* UTF8 lead byte range. */ - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - - /* CONTEXT_UTF8 second last byte. */ - /* ASCII range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 1, - 1, - 1, - 1, - 1, - 1, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 1, - 1, - 1, - 1, - 0, - - /* UTF8 continuation byte range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* UTF8 lead byte range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - - /* CONTEXT_SIGNED, last byte, same as the above values shifted by 3 bits. 
*/ - 0, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 56, - - /* CONTEXT_SIGNED, second last byte. */ - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 7, -} - -type contextLUT []byte - -func getContextLUT(mode int) contextLUT { - return kContextLookup[mode<<9:] -} - -func getContext(p1 byte, p2 byte, lut contextLUT) byte { - return lut[p1] | lut[256+int(p2)] -} diff --git a/vendor/github.com/andybalholm/brotli/decode.go b/vendor/github.com/andybalholm/brotli/decode.go deleted file mode 100644 index d2f39a051c6..00000000000 --- a/vendor/github.com/andybalholm/brotli/decode.go +++ /dev/null @@ -1,2632 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
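The context.go removal above drops the 2048-byte kContextLookup table, but the lookup itself is only getContext(p1, p2, lut) = lut[p1] | lut[256+p2], with getContextLUT(mode) slicing 512 bytes per mode out of the table (mode<<9). For the two simple modes the table values can be computed arithmetically; a standalone sketch (illustrative, derived from the deleted table's contents):

// Sketch: the two "simple" context modes from the deleted context.go,
// computed directly instead of via kContextLookup. UTF8 and signed modes
// need the full table, but compose the same way: lut[p1] | lut[256+p2].
package main

import "fmt"

// contextLSB6ID: the id is the low 6 bits of the last byte; the second-last
// byte contributes nothing (its half of the table is all zeros).
func contextLSB6ID(p1, p2 byte) byte { return p1 & 0x3F }

// contextMSB6ID: the id is the high 6 bits of the last byte; p2 is ignored.
func contextMSB6ID(p1, p2 byte) byte { return p1 >> 2 }

func main() {
	p1, p2 := byte('e'), byte('h') // the previous two bytes, e.g. "...he"
	fmt.Println("LSB6 context:", contextLSB6ID(p1, p2)) // 0x65 & 0x3F = 37
	fmt.Println("MSB6 context:", contextMSB6ID(p1, p2)) // 0x65 >> 2  = 25
}

In contextUTF8 mode the same OR acts as an addition: the last-byte half of the table stores 4*context1(p1) (always a multiple of 4) and the second-last-byte half stores context2(p2) in 0..3, which is exactly the 4*context1 + context2 formula in the deleted comment.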
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -const ( - decoderResultError = 0 - decoderResultSuccess = 1 - decoderResultNeedsMoreInput = 2 - decoderResultNeedsMoreOutput = 3 -) - -/** - * Error code for detailed logging / production debugging. - * - * See ::BrotliDecoderGetErrorCode and ::BROTLI_LAST_ERROR_CODE. - */ -const ( - decoderNoError = 0 - decoderSuccess = 1 - decoderNeedsMoreInput = 2 - decoderNeedsMoreOutput = 3 - decoderErrorFormatExuberantNibble = -1 - decoderErrorFormatReserved = -2 - decoderErrorFormatExuberantMetaNibble = -3 - decoderErrorFormatSimpleHuffmanAlphabet = -4 - decoderErrorFormatSimpleHuffmanSame = -5 - decoderErrorFormatClSpace = -6 - decoderErrorFormatHuffmanSpace = -7 - decoderErrorFormatContextMapRepeat = -8 - decoderErrorFormatBlockLength1 = -9 - decoderErrorFormatBlockLength2 = -10 - decoderErrorFormatTransform = -11 - decoderErrorFormatDictionary = -12 - decoderErrorFormatWindowBits = -13 - decoderErrorFormatPadding1 = -14 - decoderErrorFormatPadding2 = -15 - decoderErrorFormatDistance = -16 - decoderErrorDictionaryNotSet = -19 - decoderErrorInvalidArguments = -20 - decoderErrorAllocContextModes = -21 - decoderErrorAllocTreeGroups = -22 - decoderErrorAllocContextMap = -25 - decoderErrorAllocRingBuffer1 = -26 - decoderErrorAllocRingBuffer2 = -27 - decoderErrorAllocBlockTypeTrees = -30 - decoderErrorUnreachable = -31 -) - -/** - * The value of the last error code, negative integer. - * - * All other error code values are in the range from ::lastErrorCode - * to @c -1. There are also 4 other possible non-error codes @c 0 .. @c 3 in - * ::BrotliDecoderErrorCode enumeration. - */ -const lastErrorCode = decoderErrorUnreachable - -/** Options to be used with ::BrotliDecoderSetParameter. */ -const ( - decoderParamDisableRingBufferReallocation = 0 - decoderParamLargeWindow = 1 -) - -const huffmanTableBits = 8 - -const huffmanTableMask = 0xFF - -/* We need the slack region for the following reasons: - - doing up to two 16-byte copies for fast backward copying - - inserting transformed dictionary word (5 prefix + 24 base + 8 suffix) */ -const kRingBufferWriteAheadSlack uint32 = 42 - -var kCodeLengthCodeOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15} - -/* Static prefix code for the complex code length code lengths. */ -var kCodeLengthPrefixLength = [16]byte{2, 2, 2, 3, 2, 2, 2, 4, 2, 2, 2, 3, 2, 2, 2, 4} - -var kCodeLengthPrefixValue = [16]byte{0, 4, 3, 2, 0, 4, 3, 1, 0, 4, 3, 2, 0, 4, 3, 5} - -func decoderSetParameter(state *Reader, p int, value uint32) bool { - if state.state != stateUninited { - return false - } - switch p { - case decoderParamDisableRingBufferReallocation: - if !(value == 0) { - state.canny_ringbuffer_allocation = 0 - } else { - state.canny_ringbuffer_allocation = 1 - } - return true - - case decoderParamLargeWindow: - state.large_window = (!(value == 0)) - return true - - default: - return false - } -} - -/* Saves error code and converts it to BrotliDecoderResult. */ -func saveErrorCode(s *Reader, e int) int { - s.error_code = int(e) - switch e { - case decoderSuccess: - return decoderResultSuccess - - case decoderNeedsMoreInput: - return decoderResultNeedsMoreInput - - case decoderNeedsMoreOutput: - return decoderResultNeedsMoreOutput - - default: - return decoderResultError - } -} - -/* Decodes WBITS by reading 1 - 7 bits, or 0x11 for "Large Window Brotli". - Precondition: bit-reader accumulator has at least 8 bits. 
*/ -func decodeWindowBits(s *Reader, br *bitReader) int { - var n uint32 - var large_window bool = s.large_window - s.large_window = false - takeBits(br, 1, &n) - if n == 0 { - s.window_bits = 16 - return decoderSuccess - } - - takeBits(br, 3, &n) - if n != 0 { - s.window_bits = 17 + n - return decoderSuccess - } - - takeBits(br, 3, &n) - if n == 1 { - if large_window { - takeBits(br, 1, &n) - if n == 1 { - return decoderErrorFormatWindowBits - } - - s.large_window = true - return decoderSuccess - } else { - return decoderErrorFormatWindowBits - } - } - - if n != 0 { - s.window_bits = 8 + n - return decoderSuccess - } - - s.window_bits = 17 - return decoderSuccess -} - -/* Decodes a number in the range [0..255], by reading 1 - 11 bits. */ -func decodeVarLenUint8(s *Reader, br *bitReader, value *uint32) int { - var bits uint32 - switch s.substate_decode_uint8 { - case stateDecodeUint8None: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits == 0 { - *value = 0 - return decoderSuccess - } - fallthrough - - /* Fall through. */ - case stateDecodeUint8Short: - if !safeReadBits(br, 3, &bits) { - s.substate_decode_uint8 = stateDecodeUint8Short - return decoderNeedsMoreInput - } - - if bits == 0 { - *value = 1 - s.substate_decode_uint8 = stateDecodeUint8None - return decoderSuccess - } - - /* Use output value as a temporary storage. It MUST be persisted. */ - *value = bits - fallthrough - - /* Fall through. */ - case stateDecodeUint8Long: - if !safeReadBits(br, *value, &bits) { - s.substate_decode_uint8 = stateDecodeUint8Long - return decoderNeedsMoreInput - } - - *value = (1 << *value) + bits - s.substate_decode_uint8 = stateDecodeUint8None - return decoderSuccess - - default: - return decoderErrorUnreachable - } -} - -/* Decodes a metablock length and flags by reading 2 - 31 bits. */ -func decodeMetaBlockLength(s *Reader, br *bitReader) int { - var bits uint32 - var i int - for { - switch s.substate_metablock_header { - case stateMetablockHeaderNone: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.is_last_metablock = 1 - } else { - s.is_last_metablock = 0 - } - s.meta_block_remaining_len = 0 - s.is_uncompressed = 0 - s.is_metadata = 0 - if s.is_last_metablock == 0 { - s.substate_metablock_header = stateMetablockHeaderNibbles - break - } - - s.substate_metablock_header = stateMetablockHeaderEmpty - fallthrough - - /* Fall through. */ - case stateMetablockHeaderEmpty: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - } - - s.substate_metablock_header = stateMetablockHeaderNibbles - fallthrough - - /* Fall through. */ - case stateMetablockHeaderNibbles: - if !safeReadBits(br, 2, &bits) { - return decoderNeedsMoreInput - } - - s.size_nibbles = uint(byte(bits + 4)) - s.loop_counter = 0 - if bits == 3 { - s.is_metadata = 1 - s.substate_metablock_header = stateMetablockHeaderReserved - break - } - - s.substate_metablock_header = stateMetablockHeaderSize - fallthrough - - /* Fall through. 
*/ - case stateMetablockHeaderSize: - i = s.loop_counter - - for ; i < int(s.size_nibbles); i++ { - if !safeReadBits(br, 4, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - if uint(i+1) == s.size_nibbles && s.size_nibbles > 4 && bits == 0 { - return decoderErrorFormatExuberantNibble - } - - s.meta_block_remaining_len |= int(bits << uint(i*4)) - } - - s.substate_metablock_header = stateMetablockHeaderUncompressed - fallthrough - - /* Fall through. */ - case stateMetablockHeaderUncompressed: - if s.is_last_metablock == 0 { - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.is_uncompressed = 1 - } else { - s.is_uncompressed = 0 - } - } - - s.meta_block_remaining_len++ - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - - case stateMetablockHeaderReserved: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - return decoderErrorFormatReserved - } - - s.substate_metablock_header = stateMetablockHeaderBytes - fallthrough - - /* Fall through. */ - case stateMetablockHeaderBytes: - if !safeReadBits(br, 2, &bits) { - return decoderNeedsMoreInput - } - - if bits == 0 { - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - } - - s.size_nibbles = uint(byte(bits)) - s.substate_metablock_header = stateMetablockHeaderMetadata - fallthrough - - /* Fall through. */ - case stateMetablockHeaderMetadata: - i = s.loop_counter - - for ; i < int(s.size_nibbles); i++ { - if !safeReadBits(br, 8, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - if uint(i+1) == s.size_nibbles && s.size_nibbles > 1 && bits == 0 { - return decoderErrorFormatExuberantMetaNibble - } - - s.meta_block_remaining_len |= int(bits << uint(i*8)) - } - - s.meta_block_remaining_len++ - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } - } -} - -/* Decodes the Huffman code. - This method doesn't read data from the bit reader, BUT drops the amount of - bits that correspond to the decoded symbol. - bits MUST contain at least 15 (BROTLI_HUFFMAN_MAX_CODE_LENGTH) valid bits. */ -func decodeSymbol(bits uint32, table []huffmanCode, br *bitReader) uint32 { - table = table[bits&huffmanTableMask:] - if table[0].bits > huffmanTableBits { - var nbits uint32 = uint32(table[0].bits) - huffmanTableBits - dropBits(br, huffmanTableBits) - table = table[uint32(table[0].value)+((bits>>huffmanTableBits)&bitMask(nbits)):] - } - - dropBits(br, uint32(table[0].bits)) - return uint32(table[0].value) -} - -/* Reads and decodes the next Huffman code from bit-stream. - This method peeks 16 bits of input and drops 0 - 15 of them. */ -func readSymbol(table []huffmanCode, br *bitReader) uint32 { - return decodeSymbol(get16BitsUnmasked(br), table, br) -} - -/* Same as DecodeSymbol, but it is known that there is less than 15 bits of - input are currently available. */ -func safeDecodeSymbol(table []huffmanCode, br *bitReader, result *uint32) bool { - var val uint32 - var available_bits uint32 = getAvailableBits(br) - if available_bits == 0 { - if table[0].bits == 0 { - *result = uint32(table[0].value) - return true - } - - return false /* No valid bits at all. 
*/ - } - - val = uint32(getBitsUnmasked(br)) - table = table[val&huffmanTableMask:] - if table[0].bits <= huffmanTableBits { - if uint32(table[0].bits) <= available_bits { - dropBits(br, uint32(table[0].bits)) - *result = uint32(table[0].value) - return true - } else { - return false /* Not enough bits for the first level. */ - } - } - - if available_bits <= huffmanTableBits { - return false /* Not enough bits to move to the second level. */ - } - - /* Speculatively drop HUFFMAN_TABLE_BITS. */ - val = (val & bitMask(uint32(table[0].bits))) >> huffmanTableBits - - available_bits -= huffmanTableBits - table = table[uint32(table[0].value)+val:] - if available_bits < uint32(table[0].bits) { - return false /* Not enough bits for the second level. */ - } - - dropBits(br, huffmanTableBits+uint32(table[0].bits)) - *result = uint32(table[0].value) - return true -} - -func safeReadSymbol(table []huffmanCode, br *bitReader, result *uint32) bool { - var val uint32 - if safeGetBits(br, 15, &val) { - *result = decodeSymbol(val, table, br) - return true - } - - return safeDecodeSymbol(table, br, result) -} - -/* Makes a look-up in first level Huffman table. Peeks 8 bits. */ -func preloadSymbol(safe int, table []huffmanCode, br *bitReader, bits *uint32, value *uint32) { - if safe != 0 { - return - } - - table = table[getBits(br, huffmanTableBits):] - *bits = uint32(table[0].bits) - *value = uint32(table[0].value) -} - -/* Decodes the next Huffman code using data prepared by PreloadSymbol. - Reads 0 - 15 bits. Also peeks 8 following bits. */ -func readPreloadedSymbol(table []huffmanCode, br *bitReader, bits *uint32, value *uint32) uint32 { - var result uint32 = *value - var ext []huffmanCode - if *bits > huffmanTableBits { - var val uint32 = get16BitsUnmasked(br) - ext = table[val&huffmanTableMask:][*value:] - var mask uint32 = bitMask((*bits - huffmanTableBits)) - dropBits(br, huffmanTableBits) - ext = ext[(val>>huffmanTableBits)&mask:] - dropBits(br, uint32(ext[0].bits)) - result = uint32(ext[0].value) - } else { - dropBits(br, *bits) - } - - preloadSymbol(0, table, br, bits, value) - return result -} - -func log2Floor(x uint32) uint32 { - var result uint32 = 0 - for x != 0 { - x >>= 1 - result++ - } - - return result -} - -/* Reads (s->symbol + 1) symbols. - Totally 1..4 symbols are read, 1..11 bits each. - The list of symbols MUST NOT contain duplicates. */ -func readSimpleHuffmanSymbols(alphabet_size uint32, max_symbol uint32, s *Reader) int { - var br *bitReader = &s.br - var max_bits uint32 = log2Floor(alphabet_size - 1) - var i uint32 = s.sub_loop_counter - /* max_bits == 1..11; symbol == 0..3; 1..44 bits will be read. 
*/ - - var num_symbols uint32 = s.symbol - for i <= num_symbols { - var v uint32 - if !safeReadBits(br, max_bits, &v) { - s.sub_loop_counter = i - s.substate_huffman = stateHuffmanSimpleRead - return decoderNeedsMoreInput - } - - if v >= max_symbol { - return decoderErrorFormatSimpleHuffmanAlphabet - } - - s.symbols_lists_array[i] = uint16(v) - i++ - } - - for i = 0; i < num_symbols; i++ { - var k uint32 = i + 1 - for ; k <= num_symbols; k++ { - if s.symbols_lists_array[i] == s.symbols_lists_array[k] { - return decoderErrorFormatSimpleHuffmanSame - } - } - } - - return decoderSuccess -} - -/* Process single decoded symbol code length: - A) reset the repeat variable - B) remember code length (if it is not 0) - C) extend corresponding index-chain - D) reduce the Huffman space - E) update the histogram */ -func processSingleCodeLength(code_len uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) { - *repeat = 0 - if code_len != 0 { /* code_len == 1..15 */ - symbolListPut(symbol_lists, next_symbol[code_len], uint16(*symbol)) - next_symbol[code_len] = int(*symbol) - *prev_code_len = code_len - *space -= 32768 >> code_len - code_length_histo[code_len]++ - } - - (*symbol)++ -} - -/* Process repeated symbol code length. - A) Check if it is the extension of previous repeat sequence; if the decoded - value is not BROTLI_REPEAT_PREVIOUS_CODE_LENGTH, then it is a new - symbol-skip - B) Update repeat variable - C) Check if operation is feasible (fits alphabet) - D) For each symbol do the same operations as in ProcessSingleCodeLength - - PRECONDITION: code_len == BROTLI_REPEAT_PREVIOUS_CODE_LENGTH or - code_len == BROTLI_REPEAT_ZERO_CODE_LENGTH */ -func processRepeatedCodeLength(code_len uint32, repeat_delta uint32, alphabet_size uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, repeat_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) { - var old_repeat uint32 /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */ /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */ - var extra_bits uint32 = 3 - var new_len uint32 = 0 - if code_len == repeatPreviousCodeLength { - new_len = *prev_code_len - extra_bits = 2 - } - - if *repeat_code_len != new_len { - *repeat = 0 - *repeat_code_len = new_len - } - - old_repeat = *repeat - if *repeat > 0 { - *repeat -= 2 - *repeat <<= extra_bits - } - - *repeat += repeat_delta + 3 - repeat_delta = *repeat - old_repeat - if *symbol+repeat_delta > alphabet_size { - *symbol = alphabet_size - *space = 0xFFFFF - return - } - - if *repeat_code_len != 0 { - var last uint = uint(*symbol + repeat_delta) - var next int = next_symbol[*repeat_code_len] - for { - symbolListPut(symbol_lists, next, uint16(*symbol)) - next = int(*symbol) - (*symbol)++ - if (*symbol) == uint32(last) { - break - } - } - - next_symbol[*repeat_code_len] = next - *space -= repeat_delta << (15 - *repeat_code_len) - code_length_histo[*repeat_code_len] = uint16(uint32(code_length_histo[*repeat_code_len]) + repeat_delta) - } else { - *symbol += repeat_delta - } -} - -/* Reads and decodes symbol codelengths. 
*/ -func readSymbolCodeLengths(alphabet_size uint32, s *Reader) int { - var br *bitReader = &s.br - var symbol uint32 = s.symbol - var repeat uint32 = s.repeat - var space uint32 = s.space - var prev_code_len uint32 = s.prev_code_len - var repeat_code_len uint32 = s.repeat_code_len - var symbol_lists symbolList = s.symbol_lists - var code_length_histo []uint16 = s.code_length_histo[:] - var next_symbol []int = s.next_symbol[:] - if !warmupBitReader(br) { - return decoderNeedsMoreInput - } - var p []huffmanCode - for symbol < alphabet_size && space > 0 { - p = s.table[:] - var code_len uint32 - if !checkInputAmount(br, shortFillBitWindowRead) { - s.symbol = symbol - s.repeat = repeat - s.prev_code_len = prev_code_len - s.repeat_code_len = repeat_code_len - s.space = space - return decoderNeedsMoreInput - } - - fillBitWindow16(br) - p = p[getBitsUnmasked(br)&uint64(bitMask(huffmanMaxCodeLengthCodeLength)):] - dropBits(br, uint32(p[0].bits)) /* Use 1..5 bits. */ - code_len = uint32(p[0].value) /* code_len == 0..17 */ - if code_len < repeatPreviousCodeLength { - processSingleCodeLength(code_len, &symbol, &repeat, &space, &prev_code_len, symbol_lists, code_length_histo, next_symbol) /* code_len == 16..17, extra_bits == 2..3 */ - } else { - var extra_bits uint32 - if code_len == repeatPreviousCodeLength { - extra_bits = 2 - } else { - extra_bits = 3 - } - var repeat_delta uint32 = uint32(getBitsUnmasked(br)) & bitMask(extra_bits) - dropBits(br, extra_bits) - processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &symbol, &repeat, &space, &prev_code_len, &repeat_code_len, symbol_lists, code_length_histo, next_symbol) - } - } - - s.space = space - return decoderSuccess -} - -func safeReadSymbolCodeLengths(alphabet_size uint32, s *Reader) int { - var br *bitReader = &s.br - var get_byte bool = false - var p []huffmanCode - for s.symbol < alphabet_size && s.space > 0 { - p = s.table[:] - var code_len uint32 - var available_bits uint32 - var bits uint32 = 0 - if get_byte && !pullByte(br) { - return decoderNeedsMoreInput - } - get_byte = false - available_bits = getAvailableBits(br) - if available_bits != 0 { - bits = uint32(getBitsUnmasked(br)) - } - - p = p[bits&bitMask(huffmanMaxCodeLengthCodeLength):] - if uint32(p[0].bits) > available_bits { - get_byte = true - continue - } - - code_len = uint32(p[0].value) /* code_len == 0..17 */ - if code_len < repeatPreviousCodeLength { - dropBits(br, uint32(p[0].bits)) - processSingleCodeLength(code_len, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:]) /* code_len == 16..17, extra_bits == 2..3 */ - } else { - var extra_bits uint32 = code_len - 14 - var repeat_delta uint32 = (bits >> p[0].bits) & bitMask(extra_bits) - if available_bits < uint32(p[0].bits)+extra_bits { - get_byte = true - continue - } - - dropBits(br, uint32(p[0].bits)+extra_bits) - processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, &s.repeat_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:]) - } - } - - return decoderSuccess -} - -/* Reads and decodes 15..18 codes using static prefix code. - Each code is 2..4 bits long. In total 30..72 bits are used. 
*/ -func readCodeLengthCodeLengths(s *Reader) int { - var br *bitReader = &s.br - var num_codes uint32 = s.repeat - var space uint32 = s.space - var i uint32 = s.sub_loop_counter - for ; i < codeLengthCodes; i++ { - var code_len_idx byte = kCodeLengthCodeOrder[i] - var ix uint32 - var v uint32 - if !safeGetBits(br, 4, &ix) { - var available_bits uint32 = getAvailableBits(br) - if available_bits != 0 { - ix = uint32(getBitsUnmasked(br) & 0xF) - } else { - ix = 0 - } - - if uint32(kCodeLengthPrefixLength[ix]) > available_bits { - s.sub_loop_counter = i - s.repeat = num_codes - s.space = space - s.substate_huffman = stateHuffmanComplex - return decoderNeedsMoreInput - } - } - - v = uint32(kCodeLengthPrefixValue[ix]) - dropBits(br, uint32(kCodeLengthPrefixLength[ix])) - s.code_length_code_lengths[code_len_idx] = byte(v) - if v != 0 { - space = space - (32 >> v) - num_codes++ - s.code_length_histo[v]++ - if space-1 >= 32 { - /* space is 0 or wrapped around. */ - break - } - } - } - - if num_codes != 1 && space != 0 { - return decoderErrorFormatClSpace - } - - return decoderSuccess -} - -/* Decodes the Huffman tables. - There are 2 scenarios: - A) Huffman code contains only few symbols (1..4). Those symbols are read - directly; their code lengths are defined by the number of symbols. - For this scenario 4 - 49 bits will be read. - - B) 2-phase decoding: - B.1) Small Huffman table is decoded; it is specified with code lengths - encoded with predefined entropy code. 32 - 74 bits are used. - B.2) Decoded table is used to decode code lengths of symbols in resulting - Huffman table. In worst case 3520 bits are read. */ -func readHuffmanCode(alphabet_size uint32, max_symbol uint32, table []huffmanCode, opt_table_size *uint32, s *Reader) int { - var br *bitReader = &s.br - - /* Unnecessary masking, but might be good for safety. */ - alphabet_size &= 0x7FF - - /* State machine. */ - for { - switch s.substate_huffman { - case stateHuffmanNone: - if !safeReadBits(br, 2, &s.sub_loop_counter) { - return decoderNeedsMoreInput - } - - /* The value is used as follows: - 1 for simple code; - 0 for no skipping, 2 skips 2 code lengths, 3 skips 3 code lengths */ - if s.sub_loop_counter != 1 { - s.space = 32 - s.repeat = 0 /* num_codes */ - var i int - for i = 0; i <= huffmanMaxCodeLengthCodeLength; i++ { - s.code_length_histo[i] = 0 - } - - for i = 0; i < codeLengthCodes; i++ { - s.code_length_code_lengths[i] = 0 - } - - s.substate_huffman = stateHuffmanComplex - continue - } - fallthrough - - /* Read symbols, codes & code lengths directly. */ - case stateHuffmanSimpleSize: - if !safeReadBits(br, 2, &s.symbol) { /* num_symbols */ - s.substate_huffman = stateHuffmanSimpleSize - return decoderNeedsMoreInput - } - - s.sub_loop_counter = 0 - fallthrough - - case stateHuffmanSimpleRead: - { - var result int = readSimpleHuffmanSymbols(alphabet_size, max_symbol, s) - if result != decoderSuccess { - return result - } - } - fallthrough - - case stateHuffmanSimpleBuild: - var table_size uint32 - if s.symbol == 3 { - var bits uint32 - if !safeReadBits(br, 1, &bits) { - s.substate_huffman = stateHuffmanSimpleBuild - return decoderNeedsMoreInput - } - - s.symbol += bits - } - - table_size = buildSimpleHuffmanTable(table, huffmanTableBits, s.symbols_lists_array[:], s.symbol) - if opt_table_size != nil { - *opt_table_size = table_size - } - - s.substate_huffman = stateHuffmanNone - return decoderSuccess - - /* Decode Huffman-coded code lengths. 
*/ - case stateHuffmanComplex: - { - var i uint32 - var result int = readCodeLengthCodeLengths(s) - if result != decoderSuccess { - return result - } - - buildCodeLengthsHuffmanTable(s.table[:], s.code_length_code_lengths[:], s.code_length_histo[:]) - for i = 0; i < 16; i++ { - s.code_length_histo[i] = 0 - } - - for i = 0; i <= huffmanMaxCodeLength; i++ { - s.next_symbol[i] = int(i) - (huffmanMaxCodeLength + 1) - symbolListPut(s.symbol_lists, s.next_symbol[i], 0xFFFF) - } - - s.symbol = 0 - s.prev_code_len = initialRepeatedCodeLength - s.repeat = 0 - s.repeat_code_len = 0 - s.space = 32768 - s.substate_huffman = stateHuffmanLengthSymbols - } - fallthrough - - case stateHuffmanLengthSymbols: - var table_size uint32 - var result int = readSymbolCodeLengths(max_symbol, s) - if result == decoderNeedsMoreInput { - result = safeReadSymbolCodeLengths(max_symbol, s) - } - - if result != decoderSuccess { - return result - } - - if s.space != 0 { - return decoderErrorFormatHuffmanSpace - } - - table_size = buildHuffmanTable(table, huffmanTableBits, s.symbol_lists, s.code_length_histo[:]) - if opt_table_size != nil { - *opt_table_size = table_size - } - - s.substate_huffman = stateHuffmanNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } - } -} - -/* Decodes a block length by reading 3..39 bits. */ -func readBlockLength(table []huffmanCode, br *bitReader) uint32 { - var code uint32 - var nbits uint32 - code = readSymbol(table, br) - nbits = kBlockLengthPrefixCode[code].nbits /* nbits == 2..24 */ - return kBlockLengthPrefixCode[code].offset + readBits(br, nbits) -} - -/* WARNING: if state is not BROTLI_STATE_READ_BLOCK_LENGTH_NONE, then - reading can't be continued with ReadBlockLength. */ -func safeReadBlockLength(s *Reader, result *uint32, table []huffmanCode, br *bitReader) bool { - var index uint32 - if s.substate_read_block_length == stateReadBlockLengthNone { - if !safeReadSymbol(table, br, &index) { - return false - } - } else { - index = s.block_length_index - } - { - var bits uint32 /* nbits == 2..24 */ - var nbits uint32 = kBlockLengthPrefixCode[index].nbits - if !safeReadBits(br, nbits, &bits) { - s.block_length_index = index - s.substate_read_block_length = stateReadBlockLengthSuffix - return false - } - - *result = kBlockLengthPrefixCode[index].offset + bits - s.substate_read_block_length = stateReadBlockLengthNone - return true - } -} - -/* Transform: - 1) initialize list L with values 0, 1,... 255 - 2) For each input element X: - 2.1) let Y = L[X] - 2.2) remove X-th element from L - 2.3) prepend Y to L - 2.4) append Y to output - - In most cases max(Y) <= 7, so most of L remains intact. - To reduce the cost of initialization, we reuse L, remember the upper bound - of Y values, and reinitialize only first elements in L. - - Most of input values are 0 and 1. To reduce number of branches, we replace - inner for loop with do-while. */ -func inverseMoveToFrontTransform(v []byte, v_len uint32, state *Reader) { - var mtf [256]byte - var i int - for i = 1; i < 256; i++ { - mtf[i] = byte(i) - } - var mtf_1 byte - - /* Transform the input. */ - for i = 0; uint32(i) < v_len; i++ { - var index int = int(v[i]) - var value byte = mtf[index] - v[i] = value - mtf_1 = value - for index >= 1 { - index-- - mtf[index+1] = mtf[index] - } - - mtf[0] = mtf_1 - } -} - -/* Decodes a series of Huffman table using ReadHuffmanCode function. 
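(Aside, for readers skimming this removed vendor code: the inverse move-to-front transform applied to the context map above is easier to see in isolation. Below is a minimal standalone Go sketch of the same idea, with hypothetical names; it is illustrative only and not taken from the vendored sources.)

package main

import "fmt"

// inverseMTF undoes a move-to-front encoding: the list L starts as 0..255;
// for each input value x the output byte is L[x], which is then moved to the
// front of L. Small input values therefore decode to recently used bytes.
func inverseMTF(in []byte) []byte {
	var mtf [256]byte
	for i := range mtf {
		mtf[i] = byte(i)
	}
	out := make([]byte, len(in))
	for i, x := range in {
		v := mtf[x]
		out[i] = v
		// Shift L[0..x-1] up by one position; Go's copy handles the overlap.
		copy(mtf[1:int(x)+1], mtf[:int(x)])
		mtf[0] = v
	}
	return out
}

func main() {
	// A zero always repeats the most recently emitted value.
	fmt.Println(inverseMTF([]byte{1, 0, 0, 2, 0})) // [1 1 1 2 2]
}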
*/ -func huffmanTreeGroupDecode(group *huffmanTreeGroup, s *Reader) int { - if s.substate_tree_group != stateTreeGroupLoop { - s.next = group.codes - s.htree_index = 0 - s.substate_tree_group = stateTreeGroupLoop - } - - for s.htree_index < int(group.num_htrees) { - var table_size uint32 - var result int = readHuffmanCode(uint32(group.alphabet_size), uint32(group.max_symbol), s.next, &table_size, s) - if result != decoderSuccess { - return result - } - group.htrees[s.htree_index] = s.next - s.next = s.next[table_size:] - s.htree_index++ - } - - s.substate_tree_group = stateTreeGroupNone - return decoderSuccess -} - -/* Decodes a context map. - Decoding is done in 4 phases: - 1) Read auxiliary information (6..16 bits) and allocate memory. - In case of trivial context map, decoding is finished at this phase. - 2) Decode Huffman table using ReadHuffmanCode function. - This table will be used for reading context map items. - 3) Read context map items; "0" values could be run-length encoded. - 4) Optionally, apply InverseMoveToFront transform to the resulting map. */ -func decodeContextMap(context_map_size uint32, num_htrees *uint32, context_map_arg *[]byte, s *Reader) int { - var br *bitReader = &s.br - var result int = decoderSuccess - - switch int(s.substate_context_map) { - case stateContextMapNone: - result = decodeVarLenUint8(s, br, num_htrees) - if result != decoderSuccess { - return result - } - - (*num_htrees)++ - s.context_index = 0 - *context_map_arg = make([]byte, uint(context_map_size)) - if *context_map_arg == nil { - return decoderErrorAllocContextMap - } - - if *num_htrees <= 1 { - for i := 0; i < int(context_map_size); i++ { - (*context_map_arg)[i] = 0 - } - return decoderSuccess - } - - s.substate_context_map = stateContextMapReadPrefix - fallthrough - /* Fall through. */ - case stateContextMapReadPrefix: - { - var bits uint32 - - /* In next stage ReadHuffmanCode uses at least 4 bits, so it is safe - to peek 4 bits ahead. */ - if !safeGetBits(br, 5, &bits) { - return decoderNeedsMoreInput - } - - if bits&1 != 0 { /* Use RLE for zeros. */ - s.max_run_length_prefix = (bits >> 1) + 1 - dropBits(br, 5) - } else { - s.max_run_length_prefix = 0 - dropBits(br, 1) - } - - s.substate_context_map = stateContextMapHuffman - } - fallthrough - - /* Fall through. */ - case stateContextMapHuffman: - { - var alphabet_size uint32 = *num_htrees + s.max_run_length_prefix - result = readHuffmanCode(alphabet_size, alphabet_size, s.context_map_table[:], nil, s) - if result != decoderSuccess { - return result - } - s.code = 0xFFFF - s.substate_context_map = stateContextMapDecode - } - fallthrough - - /* Fall through. */ - case stateContextMapDecode: - { - var context_index uint32 = s.context_index - var max_run_length_prefix uint32 = s.max_run_length_prefix - var context_map []byte = *context_map_arg - var code uint32 = s.code - var skip_preamble bool = (code != 0xFFFF) - for context_index < context_map_size || skip_preamble { - if !skip_preamble { - if !safeReadSymbol(s.context_map_table[:], br, &code) { - s.code = 0xFFFF - s.context_index = context_index - return decoderNeedsMoreInput - } - - if code == 0 { - context_map[context_index] = 0 - context_index++ - continue - } - - if code > max_run_length_prefix { - context_map[context_index] = byte(code - max_run_length_prefix) - context_index++ - continue - } - } else { - skip_preamble = false - } - - /* RLE sub-stage. 
*/ - { - var reps uint32 - if !safeReadBits(br, code, &reps) { - s.code = code - s.context_index = context_index - return decoderNeedsMoreInput - } - - reps += 1 << code - if context_index+reps > context_map_size { - return decoderErrorFormatContextMapRepeat - } - - for { - context_map[context_index] = 0 - context_index++ - reps-- - if reps == 0 { - break - } - } - } - } - } - fallthrough - - case stateContextMapTransform: - var bits uint32 - if !safeReadBits(br, 1, &bits) { - s.substate_context_map = stateContextMapTransform - return decoderNeedsMoreInput - } - - if bits != 0 { - inverseMoveToFrontTransform(*context_map_arg, context_map_size, s) - } - - s.substate_context_map = stateContextMapNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } -} - -/* Decodes a command or literal and updates block type ring-buffer. - Reads 3..54 bits. */ -func decodeBlockTypeAndLength(safe int, s *Reader, tree_type int) bool { - var max_block_type uint32 = s.num_block_types[tree_type] - var type_tree []huffmanCode - type_tree = s.block_type_trees[tree_type*huffmanMaxSize258:] - var len_tree []huffmanCode - len_tree = s.block_len_trees[tree_type*huffmanMaxSize26:] - var br *bitReader = &s.br - var ringbuffer []uint32 = s.block_type_rb[tree_type*2:] - var block_type uint32 - if max_block_type <= 1 { - return false - } - - /* Read 0..15 + 3..39 bits. */ - if safe == 0 { - block_type = readSymbol(type_tree, br) - s.block_length[tree_type] = readBlockLength(len_tree, br) - } else { - var memento bitReaderState - bitReaderSaveState(br, &memento) - if !safeReadSymbol(type_tree, br, &block_type) { - return false - } - if !safeReadBlockLength(s, &s.block_length[tree_type], len_tree, br) { - s.substate_read_block_length = stateReadBlockLengthNone - bitReaderRestoreState(br, &memento) - return false - } - } - - if block_type == 1 { - block_type = ringbuffer[1] + 1 - } else if block_type == 0 { - block_type = ringbuffer[0] - } else { - block_type -= 2 - } - - if block_type >= max_block_type { - block_type -= max_block_type - } - - ringbuffer[0] = ringbuffer[1] - ringbuffer[1] = block_type - return true -} - -func detectTrivialLiteralBlockTypes(s *Reader) { - var i uint - for i = 0; i < 8; i++ { - s.trivial_literal_contexts[i] = 0 - } - for i = 0; uint32(i) < s.num_block_types[0]; i++ { - var offset uint = i << literalContextBits - var error uint = 0 - var sample uint = uint(s.context_map[offset]) - var j uint - for j = 0; j < 1<>5] |= 1 << (i & 31) - } - } -} - -func prepareLiteralDecoding(s *Reader) { - var context_mode byte - var trivial uint - var block_type uint32 = s.block_type_rb[1] - var context_offset uint32 = block_type << literalContextBits - s.context_map_slice = s.context_map[context_offset:] - trivial = uint(s.trivial_literal_contexts[block_type>>5]) - s.trivial_literal_context = int((trivial >> (block_type & 31)) & 1) - s.literal_htree = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[0]]) - context_mode = s.context_modes[block_type] & 3 - s.context_lookup = getContextLUT(int(context_mode)) -} - -/* Decodes the block type and updates the state for literal context. - Reads 3..54 bits. 
*/ -func decodeLiteralBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 0) { - return false - } - - prepareLiteralDecoding(s) - return true -} - -func decodeLiteralBlockSwitch(s *Reader) { - decodeLiteralBlockSwitchInternal(0, s) -} - -func safeDecodeLiteralBlockSwitch(s *Reader) bool { - return decodeLiteralBlockSwitchInternal(1, s) -} - -/* Block switch for insert/copy length. - Reads 3..54 bits. */ -func decodeCommandBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 1) { - return false - } - - s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[s.block_type_rb[3]]) - return true -} - -func decodeCommandBlockSwitch(s *Reader) { - decodeCommandBlockSwitchInternal(0, s) -} - -func safeDecodeCommandBlockSwitch(s *Reader) bool { - return decodeCommandBlockSwitchInternal(1, s) -} - -/* Block switch for distance codes. - Reads 3..54 bits. */ -func decodeDistanceBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 2) { - return false - } - - s.dist_context_map_slice = s.dist_context_map[s.block_type_rb[5]< s.ringbuffer_size { - pos = uint(s.ringbuffer_size) - } else { - pos = uint(s.pos) - } - var partial_pos_rb uint = (s.rb_roundtrips * uint(s.ringbuffer_size)) + pos - return partial_pos_rb - s.partial_pos_out -} - -/* Dumps output. - Returns BROTLI_DECODER_NEEDS_MORE_OUTPUT only if there is more output to push - and either ring-buffer is as big as window size, or |force| is true. */ -func writeRingBuffer(s *Reader, available_out *uint, next_out *[]byte, total_out *uint, force bool) int { - var start []byte - start = s.ringbuffer[s.partial_pos_out&uint(s.ringbuffer_mask):] - var to_write uint = unwrittenBytes(s, true) - var num_written uint = *available_out - if num_written > to_write { - num_written = to_write - } - - if s.meta_block_remaining_len < 0 { - return decoderErrorFormatBlockLength1 - } - - if next_out != nil && *next_out == nil { - *next_out = start - } else { - if next_out != nil { - copy(*next_out, start[:num_written]) - *next_out = (*next_out)[num_written:] - } - } - - *available_out -= num_written - s.partial_pos_out += num_written - if total_out != nil { - *total_out = s.partial_pos_out - } - - if num_written < to_write { - if s.ringbuffer_size == 1<= s.ringbuffer_size { - s.pos -= s.ringbuffer_size - s.rb_roundtrips++ - if uint(s.pos) != 0 { - s.should_wrap_ringbuffer = 1 - } else { - s.should_wrap_ringbuffer = 0 - } - } - - return decoderSuccess -} - -func wrapRingBuffer(s *Reader) { - if s.should_wrap_ringbuffer != 0 { - copy(s.ringbuffer, s.ringbuffer_end[:uint(s.pos)]) - s.should_wrap_ringbuffer = 0 - } -} - -/* Allocates ring-buffer. - - s->ringbuffer_size MUST be updated by BrotliCalculateRingBufferSize before - this function is called. - - Last two bytes of ring-buffer are initialized to 0, so context calculation - could be done uniformly for the first two and all other positions. */ -func ensureRingBuffer(s *Reader) bool { - var old_ringbuffer []byte = s.ringbuffer - if s.ringbuffer_size == s.new_ringbuffer_size { - return true - } - - s.ringbuffer = make([]byte, uint(s.new_ringbuffer_size)+uint(kRingBufferWriteAheadSlack)) - if s.ringbuffer == nil { - /* Restore previous value. 
*/ - s.ringbuffer = old_ringbuffer - - return false - } - - s.ringbuffer[s.new_ringbuffer_size-2] = 0 - s.ringbuffer[s.new_ringbuffer_size-1] = 0 - - if !(old_ringbuffer == nil) { - copy(s.ringbuffer, old_ringbuffer[:uint(s.pos)]) - - old_ringbuffer = nil - } - - s.ringbuffer_size = s.new_ringbuffer_size - s.ringbuffer_mask = s.new_ringbuffer_size - 1 - s.ringbuffer_end = s.ringbuffer[s.ringbuffer_size:] - - return true -} - -func copyUncompressedBlockToOutput(available_out *uint, next_out *[]byte, total_out *uint, s *Reader) int { - /* TODO: avoid allocation for single uncompressed block. */ - if !ensureRingBuffer(s) { - return decoderErrorAllocRingBuffer1 - } - - /* State machine */ - for { - switch s.substate_uncompressed { - case stateUncompressedNone: - { - var nbytes int = int(getRemainingBytes(&s.br)) - if nbytes > s.meta_block_remaining_len { - nbytes = s.meta_block_remaining_len - } - - if s.pos+nbytes > s.ringbuffer_size { - nbytes = s.ringbuffer_size - s.pos - } - - /* Copy remaining bytes from s->br.buf_ to ring-buffer. */ - copyBytes(s.ringbuffer[s.pos:], &s.br, uint(nbytes)) - - s.pos += nbytes - s.meta_block_remaining_len -= nbytes - if s.pos < 1<>1 >= min_size { - new_ringbuffer_size >>= 1 - } - } - - s.new_ringbuffer_size = new_ringbuffer_size -} - -/* Reads 1..256 2-bit context modes. */ -func readContextModes(s *Reader) int { - var br *bitReader = &s.br - var i int = s.loop_counter - - for i < int(s.num_block_types[0]) { - var bits uint32 - if !safeReadBits(br, 2, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - s.context_modes[i] = byte(bits) - i++ - } - - return decoderSuccess -} - -func takeDistanceFromRingBuffer(s *Reader) { - if s.distance_code == 0 { - s.dist_rb_idx-- - s.distance_code = s.dist_rb[s.dist_rb_idx&3] - - /* Compensate double distance-ring-buffer roll for dictionary items. */ - s.distance_context = 1 - } else { - var distance_code int = s.distance_code << 1 - const kDistanceShortCodeIndexOffset uint32 = 0xAAAFFF1B - const kDistanceShortCodeValueOffset uint32 = 0xFA5FA500 - var v int = (s.dist_rb_idx + int(kDistanceShortCodeIndexOffset>>uint(distance_code))) & 0x3 - /* kDistanceShortCodeIndexOffset has 2-bit values from LSB: - 3, 2, 1, 0, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2 */ - - /* kDistanceShortCodeValueOffset has 2-bit values from LSB: - -0, 0,-0, 0,-1, 1,-2, 2,-3, 3,-1, 1,-2, 2,-3, 3 */ - s.distance_code = s.dist_rb[v] - - v = int(kDistanceShortCodeValueOffset>>uint(distance_code)) & 0x3 - if distance_code&0x3 != 0 { - s.distance_code += v - } else { - s.distance_code -= v - if s.distance_code <= 0 { - /* A huge distance will cause a () soon. - This is a little faster than failing here. */ - s.distance_code = 0x7FFFFFFF - } - } - } -} - -func safeReadBitsMaybeZero(br *bitReader, n_bits uint32, val *uint32) bool { - if n_bits != 0 { - return safeReadBits(br, n_bits, val) - } else { - *val = 0 - return true - } -} - -/* Precondition: s->distance_code < 0. */ -func readDistanceInternal(safe int, s *Reader, br *bitReader) bool { - var distval int - var memento bitReaderState - var distance_tree []huffmanCode = []huffmanCode(s.distance_hgroup.htrees[s.dist_htree_index]) - if safe == 0 { - s.distance_code = int(readSymbol(distance_tree, br)) - } else { - var code uint32 - bitReaderSaveState(br, &memento) - if !safeReadSymbol(distance_tree, br, &code) { - return false - } - - s.distance_code = int(code) - } - - /* Convert the distance code to the actual distance by possibly - looking up past distances from the s->ringbuffer. 
*/ - s.distance_context = 0 - - if s.distance_code&^0xF == 0 { - takeDistanceFromRingBuffer(s) - s.block_length[2]-- - return true - } - - distval = s.distance_code - int(s.num_direct_distance_codes) - if distval >= 0 { - var nbits uint32 - var postfix int - var offset int - if safe == 0 && (s.distance_postfix_bits == 0) { - nbits = (uint32(distval) >> 1) + 1 - offset = ((2 + (distval & 1)) << nbits) - 4 - s.distance_code = int(s.num_direct_distance_codes) + offset + int(readBits(br, nbits)) - } else { - /* This branch also works well when s->distance_postfix_bits == 0. */ - var bits uint32 - postfix = distval & s.distance_postfix_mask - distval >>= s.distance_postfix_bits - nbits = (uint32(distval) >> 1) + 1 - if safe != 0 { - if !safeReadBitsMaybeZero(br, nbits, &bits) { - s.distance_code = -1 /* Restore precondition. */ - bitReaderRestoreState(br, &memento) - return false - } - } else { - bits = readBits(br, nbits) - } - - offset = ((2 + (distval & 1)) << nbits) - 4 - s.distance_code = int(s.num_direct_distance_codes) + ((offset + int(bits)) << s.distance_postfix_bits) + postfix - } - } - - s.distance_code = s.distance_code - numDistanceShortCodes + 1 - s.block_length[2]-- - return true -} - -func readDistance(s *Reader, br *bitReader) { - readDistanceInternal(0, s, br) -} - -func safeReadDistance(s *Reader, br *bitReader) bool { - return readDistanceInternal(1, s, br) -} - -func readCommandInternal(safe int, s *Reader, br *bitReader, insert_length *int) bool { - var cmd_code uint32 - var insert_len_extra uint32 = 0 - var copy_length uint32 - var v cmdLutElement - var memento bitReaderState - if safe == 0 { - cmd_code = readSymbol(s.htree_command, br) - } else { - bitReaderSaveState(br, &memento) - if !safeReadSymbol(s.htree_command, br, &cmd_code) { - return false - } - } - - v = kCmdLut[cmd_code] - s.distance_code = int(v.distance_code) - s.distance_context = int(v.context) - s.dist_htree_index = s.dist_context_map_slice[s.distance_context] - *insert_length = int(v.insert_len_offset) - if safe == 0 { - if v.insert_len_extra_bits != 0 { - insert_len_extra = readBits(br, uint32(v.insert_len_extra_bits)) - } - - copy_length = readBits(br, uint32(v.copy_len_extra_bits)) - } else { - if !safeReadBitsMaybeZero(br, uint32(v.insert_len_extra_bits), &insert_len_extra) || !safeReadBitsMaybeZero(br, uint32(v.copy_len_extra_bits), ©_length) { - bitReaderRestoreState(br, &memento) - return false - } - } - - s.copy_length = int(copy_length) + int(v.copy_len_offset) - s.block_length[1]-- - *insert_length += int(insert_len_extra) - return true -} - -func readCommand(s *Reader, br *bitReader, insert_length *int) { - readCommandInternal(0, s, br, insert_length) -} - -func safeReadCommand(s *Reader, br *bitReader, insert_length *int) bool { - return readCommandInternal(1, s, br, insert_length) -} - -func checkInputAmountMaybeSafe(safe int, br *bitReader, num uint) bool { - if safe != 0 { - return true - } - - return checkInputAmount(br, num) -} - -func processCommandsInternal(safe int, s *Reader) int { - var pos int = s.pos - var i int = s.loop_counter - var result int = decoderSuccess - var br *bitReader = &s.br - var hc []huffmanCode - - if !checkInputAmountMaybeSafe(safe, br, 28) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if safe == 0 { - warmupBitReader(br) - } - - /* Jump into state machine. 
*/ - if s.state == stateCommandBegin { - goto CommandBegin - } else if s.state == stateCommandInner { - goto CommandInner - } else if s.state == stateCommandPostDecodeLiterals { - goto CommandPostDecodeLiterals - } else if s.state == stateCommandPostWrapCopy { - goto CommandPostWrapCopy - } else { - return decoderErrorUnreachable - } - -CommandBegin: - if safe != 0 { - s.state = stateCommandBegin - } - - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 156 bits + 7 bytes */ - s.state = stateCommandBegin - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[1] == 0 { - if safe != 0 { - if !safeDecodeCommandBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeCommandBlockSwitch(s) - } - - goto CommandBegin - } - - /* Read the insert/copy length in the command. */ - if safe != 0 { - if !safeReadCommand(s, br, &i) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - readCommand(s, br, &i) - } - - if i == 0 { - goto CommandPostDecodeLiterals - } - - s.meta_block_remaining_len -= i - -CommandInner: - if safe != 0 { - s.state = stateCommandInner - } - - /* Read the literals in the command. */ - if s.trivial_literal_context != 0 { - var bits uint32 - var value uint32 - preloadSymbol(safe, s.literal_htree, br, &bits, &value) - for { - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */ - s.state = stateCommandInner - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[0] == 0 { - if safe != 0 { - if !safeDecodeLiteralBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeLiteralBlockSwitch(s) - } - - preloadSymbol(safe, s.literal_htree, br, &bits, &value) - if s.trivial_literal_context == 0 { - goto CommandInner - } - } - - if safe == 0 { - s.ringbuffer[pos] = byte(readPreloadedSymbol(s.literal_htree, br, &bits, &value)) - } else { - var literal uint32 - if !safeReadSymbol(s.literal_htree, br, &literal) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - s.ringbuffer[pos] = byte(literal) - } - - s.block_length[0]-- - pos++ - if pos == s.ringbuffer_size { - s.state = stateCommandInnerWrite - i-- - goto saveStateAndReturn - } - i-- - if i == 0 { - break - } - } - } else { - var p1 byte = s.ringbuffer[(pos-1)&s.ringbuffer_mask] - var p2 byte = s.ringbuffer[(pos-2)&s.ringbuffer_mask] - for { - var context byte - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */ - s.state = stateCommandInner - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[0] == 0 { - if safe != 0 { - if !safeDecodeLiteralBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeLiteralBlockSwitch(s) - } - - if s.trivial_literal_context != 0 { - goto CommandInner - } - } - - context = getContext(p1, p2, s.context_lookup) - hc = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[context]]) - p2 = p1 - if safe == 0 { - p1 = byte(readSymbol(hc, br)) - } else { - var literal uint32 - if !safeReadSymbol(hc, br, &literal) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - p1 = byte(literal) - } - - s.ringbuffer[pos] = p1 - s.block_length[0]-- - pos++ - if pos == s.ringbuffer_size { - s.state = stateCommandInnerWrite - i-- - goto saveStateAndReturn - } - i-- - if i == 0 { - break - } - } - } - - if s.meta_block_remaining_len <= 0 { - s.state = stateMetablockDone - goto saveStateAndReturn - } - 
-CommandPostDecodeLiterals: - if safe != 0 { - s.state = stateCommandPostDecodeLiterals - } - - if s.distance_code >= 0 { - /* Implicit distance case. */ - if s.distance_code != 0 { - s.distance_context = 0 - } else { - s.distance_context = 1 - } - - s.dist_rb_idx-- - s.distance_code = s.dist_rb[s.dist_rb_idx&3] - } else { - /* Read distance code in the command, unless it was implicitly zero. */ - if s.block_length[2] == 0 { - if safe != 0 { - if !safeDecodeDistanceBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeDistanceBlockSwitch(s) - } - } - - if safe != 0 { - if !safeReadDistance(s, br) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - readDistance(s, br) - } - } - - if s.max_distance != s.max_backward_distance { - if pos < s.max_backward_distance { - s.max_distance = pos - } else { - s.max_distance = s.max_backward_distance - } - } - - i = s.copy_length - - /* Apply copy of LZ77 back-reference, or static dictionary reference if - the distance is larger than the max LZ77 distance */ - if s.distance_code > s.max_distance { - /* The maximum allowed distance is BROTLI_MAX_ALLOWED_DISTANCE = 0x7FFFFFFC. - With this choice, no signed overflow can occur after decoding - a special distance code (e.g., after adding 3 to the last distance). */ - if s.distance_code > maxAllowedDistance { - return decoderErrorFormatDistance - } - - if i >= minDictionaryWordLength && i <= maxDictionaryWordLength { - var address int = s.distance_code - s.max_distance - 1 - var words *dictionary = s.dictionary - var trans *transforms = s.transforms - var offset int = int(s.dictionary.offsets_by_length[i]) - var shift uint32 = uint32(s.dictionary.size_bits_by_length[i]) - var mask int = int(bitMask(shift)) - var word_idx int = address & mask - var transform_idx int = address >> shift - - /* Compensate double distance-ring-buffer roll. */ - s.dist_rb_idx += s.distance_context - - offset += word_idx * i - if words.data == nil { - return decoderErrorDictionaryNotSet - } - - if transform_idx < int(trans.num_transforms) { - var word []byte - word = words.data[offset:] - var len int = i - if transform_idx == int(trans.cutOffTransforms[0]) { - copy(s.ringbuffer[pos:], word[:uint(len)]) - } else { - len = transformDictionaryWord(s.ringbuffer[pos:], word, int(len), trans, transform_idx) - } - - pos += int(len) - s.meta_block_remaining_len -= int(len) - if pos >= s.ringbuffer_size { - s.state = stateCommandPostWrite1 - goto saveStateAndReturn - } - } else { - return decoderErrorFormatTransform - } - } else { - return decoderErrorFormatDictionary - } - } else { - var src_start int = (pos - s.distance_code) & s.ringbuffer_mask - var copy_dst []byte - copy_dst = s.ringbuffer[pos:] - var copy_src []byte - copy_src = s.ringbuffer[src_start:] - var dst_end int = pos + i - var src_end int = src_start + i - - /* Update the recent distances cache. */ - s.dist_rb[s.dist_rb_idx&3] = s.distance_code - - s.dist_rb_idx++ - s.meta_block_remaining_len -= i - - /* There are 32+ bytes of slack in the ring-buffer allocation. - Also, we have 16 short codes, that make these 16 bytes irrelevant - in the ring-buffer. Let's copy over them as a first guess. */ - copy(copy_dst, copy_src[:16]) - - if src_end > pos && dst_end > src_start { - /* Regions intersect. */ - goto CommandPostWrapCopy - } - - if dst_end >= s.ringbuffer_size || src_end >= s.ringbuffer_size { - /* At least one region wraps. 
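(Aside: the slow path above reaches CommandPostWrapCopy because an LZ77 back-reference can overlap the bytes it is producing, or can wrap around the ring buffer; when the distance is smaller than the copy length the bytes must be emitted in order so that freshly written bytes can serve as source. A self-contained sketch of the overlap case only, ignoring ring-buffer wrap, with hypothetical names and not taken from the vendored sources:)

package main

import "fmt"

// copyBackReference appends length bytes starting distance bytes before the
// current end of out. With distance < length the ranges overlap, so each byte
// is emitted in order and may itself be copied again later in the same match.
func copyBackReference(out []byte, distance, length int) []byte {
	start := len(out) - distance
	for i := 0; i < length; i++ {
		out = append(out, out[start+i])
	}
	return out
}

func main() {
	out := []byte("ab")
	out = copyBackReference(out, 2, 6) // distance 2, length 6
	fmt.Println(string(out))           // abababab
}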
*/ - goto CommandPostWrapCopy - } - - pos += i - if i > 16 { - if i > 32 { - copy(copy_dst[16:], copy_src[16:][:uint(i-16)]) - } else { - /* This branch covers about 45% cases. - Fixed size short copy allows more compiler optimizations. */ - copy(copy_dst[16:], copy_src[16:][:16]) - } - } - } - - if s.meta_block_remaining_len <= 0 { - /* Next metablock, if any. */ - s.state = stateMetablockDone - - goto saveStateAndReturn - } else { - goto CommandBegin - } -CommandPostWrapCopy: - { - var wrap_guard int = s.ringbuffer_size - pos - for { - i-- - if i < 0 { - break - } - s.ringbuffer[pos] = s.ringbuffer[(pos-s.distance_code)&s.ringbuffer_mask] - pos++ - wrap_guard-- - if wrap_guard == 0 { - s.state = stateCommandPostWrite2 - goto saveStateAndReturn - } - } - } - - if s.meta_block_remaining_len <= 0 { - /* Next metablock, if any. */ - s.state = stateMetablockDone - - goto saveStateAndReturn - } else { - goto CommandBegin - } - -saveStateAndReturn: - s.pos = pos - s.loop_counter = i - return result -} - -func processCommands(s *Reader) int { - return processCommandsInternal(0, s) -} - -func safeProcessCommands(s *Reader) int { - return processCommandsInternal(1, s) -} - -/* Returns the maximum number of distance symbols which can only represent - distances not exceeding BROTLI_MAX_ALLOWED_DISTANCE. */ - -var maxDistanceSymbol_bound = [maxNpostfix + 1]uint32{0, 4, 12, 28} -var maxDistanceSymbol_diff = [maxNpostfix + 1]uint32{73, 126, 228, 424} - -func maxDistanceSymbol(ndirect uint32, npostfix uint32) uint32 { - var postfix uint32 = 1 << npostfix - if ndirect < maxDistanceSymbol_bound[npostfix] { - return ndirect + maxDistanceSymbol_diff[npostfix] + postfix - } else if ndirect > maxDistanceSymbol_bound[npostfix]+postfix { - return ndirect + maxDistanceSymbol_diff[npostfix] - } else { - return maxDistanceSymbol_bound[npostfix] + maxDistanceSymbol_diff[npostfix] + postfix - } -} - -/* Invariant: input stream is never overconsumed: - - invalid input implies that the whole stream is invalid -> any amount of - input could be read and discarded - - when result is "needs more input", then at least one more byte is REQUIRED - to complete decoding; all input data MUST be consumed by decoder, so - client could swap the input buffer - - when result is "needs more output" decoder MUST ensure that it doesn't - hold more than 7 bits in bit reader; this saves client from swapping input - buffer ahead of time - - when result is "success" decoder MUST return all unused data back to input - buffer; this is possible because the invariant is held on enter */ -func decoderDecompressStream(s *Reader, available_in *uint, next_in *[]byte, available_out *uint, next_out *[]byte) int { - var result int = decoderSuccess - var br *bitReader = &s.br - - /* Do not try to process further in a case of unrecoverable error. */ - if int(s.error_code) < 0 { - return decoderResultError - } - - if *available_out != 0 && (next_out == nil || *next_out == nil) { - return saveErrorCode(s, decoderErrorInvalidArguments) - } - - if *available_out == 0 { - next_out = nil - } - if s.buffer_length == 0 { /* Just connect bit reader to input stream. */ - br.input_len = *available_in - br.input = *next_in - br.byte_pos = 0 - } else { - /* At least one byte of input is required. More than one byte of input may - be required to complete the transaction -> reading more data must be - done in a loop -> do it in a main loop. 
*/ - result = decoderNeedsMoreInput - - br.input = s.buffer.u8[:] - br.byte_pos = 0 - } - - /* State machine */ - for { - if result != decoderSuccess { - /* Error, needs more input/output. */ - if result == decoderNeedsMoreInput { - if s.ringbuffer != nil { /* Pro-actively push output. */ - var intermediate_result int = writeRingBuffer(s, available_out, next_out, nil, true) - - /* WriteRingBuffer checks s->meta_block_remaining_len validity. */ - if int(intermediate_result) < 0 { - result = intermediate_result - break - } - } - - if s.buffer_length != 0 { /* Used with internal buffer. */ - if br.byte_pos == br.input_len { - /* Successfully finished read transaction. - Accumulator contains less than 8 bits, because internal buffer - is expanded byte-by-byte until it is enough to complete read. */ - s.buffer_length = 0 - - /* Switch to input stream and restart. */ - result = decoderSuccess - - br.input_len = *available_in - br.input = *next_in - br.byte_pos = 0 - continue - } else if *available_in != 0 { - /* Not enough data in buffer, but can take one more byte from - input stream. */ - result = decoderSuccess - - s.buffer.u8[s.buffer_length] = (*next_in)[0] - s.buffer_length++ - br.input_len = uint(s.buffer_length) - *next_in = (*next_in)[1:] - (*available_in)-- - - /* Retry with more data in buffer. */ - continue - } - - /* Can't finish reading and no more input. */ - break - /* Input stream doesn't contain enough input. */ - } else { - /* Copy tail to internal buffer and return. */ - *next_in = br.input[br.byte_pos:] - - *available_in = br.input_len - br.byte_pos - for *available_in != 0 { - s.buffer.u8[s.buffer_length] = (*next_in)[0] - s.buffer_length++ - *next_in = (*next_in)[1:] - (*available_in)-- - } - - break - } - } - - /* Unreachable. */ - - /* Fail or needs more output. */ - if s.buffer_length != 0 { - /* Just consumed the buffered input and produced some output. Otherwise - it would result in "needs more input". Reset internal buffer. */ - s.buffer_length = 0 - } else { - /* Using input stream in last iteration. When decoder switches to input - stream it has less than 8 bits in accumulator, so it is safe to - return unused accumulator bits there. */ - bitReaderUnload(br) - - *available_in = br.input_len - br.byte_pos - *next_in = br.input[br.byte_pos:] - } - - break - } - - switch s.state { - /* Prepare to the first read. */ - case stateUninited: - if !warmupBitReader(br) { - result = decoderNeedsMoreInput - break - } - - /* Decode window size. */ - result = decodeWindowBits(s, br) /* Reads 1..8 bits. */ - if result != decoderSuccess { - break - } - - if s.large_window { - s.state = stateLargeWindowBits - break - } - - s.state = stateInitialize - - case stateLargeWindowBits: - if !safeReadBits(br, 6, &s.window_bits) { - result = decoderNeedsMoreInput - break - } - - if s.window_bits < largeMinWbits || s.window_bits > largeMaxWbits { - result = decoderErrorFormatWindowBits - break - } - - s.state = stateInitialize - fallthrough - - /* Maximum distance, see section 9.1. of the spec. */ - /* Fall through. */ - case stateInitialize: - s.max_backward_distance = (1 << s.window_bits) - windowGap - - /* Allocate memory for both block_type_trees and block_len_trees. */ - s.block_type_trees = make([]huffmanCode, (3 * (huffmanMaxSize258 + huffmanMaxSize26))) - - if s.block_type_trees == nil { - result = decoderErrorAllocBlockTypeTrees - break - } - - s.block_len_trees = s.block_type_trees[3*huffmanMaxSize258:] - - s.state = stateMetablockBegin - fallthrough - - /* Fall through. 
*/ - case stateMetablockBegin: - decoderStateMetablockBegin(s) - - s.state = stateMetablockHeader - fallthrough - - /* Fall through. */ - case stateMetablockHeader: - result = decodeMetaBlockLength(s, br) - /* Reads 2 - 31 bits. */ - if result != decoderSuccess { - break - } - - if s.is_metadata != 0 || s.is_uncompressed != 0 { - if !bitReaderJumpToByteBoundary(br) { - result = decoderErrorFormatPadding1 - break - } - } - - if s.is_metadata != 0 { - s.state = stateMetadata - break - } - - if s.meta_block_remaining_len == 0 { - s.state = stateMetablockDone - break - } - - calculateRingBufferSize(s) - if s.is_uncompressed != 0 { - s.state = stateUncompressed - break - } - - s.loop_counter = 0 - s.state = stateHuffmanCode0 - - case stateUncompressed: - result = copyUncompressedBlockToOutput(available_out, next_out, nil, s) - if result == decoderSuccess { - s.state = stateMetablockDone - } - - case stateMetadata: - for ; s.meta_block_remaining_len > 0; s.meta_block_remaining_len-- { - var bits uint32 - - /* Read one byte and ignore it. */ - if !safeReadBits(br, 8, &bits) { - result = decoderNeedsMoreInput - break - } - } - - if result == decoderSuccess { - s.state = stateMetablockDone - } - - case stateHuffmanCode0: - if s.loop_counter >= 3 { - s.state = stateMetablockHeader2 - break - } - - /* Reads 1..11 bits. */ - result = decodeVarLenUint8(s, br, &s.num_block_types[s.loop_counter]) - - if result != decoderSuccess { - break - } - - s.num_block_types[s.loop_counter]++ - if s.num_block_types[s.loop_counter] < 2 { - s.loop_counter++ - break - } - - s.state = stateHuffmanCode1 - fallthrough - - case stateHuffmanCode1: - { - var alphabet_size uint32 = s.num_block_types[s.loop_counter] + 2 - var tree_offset int = s.loop_counter * huffmanMaxSize258 - result = readHuffmanCode(alphabet_size, alphabet_size, s.block_type_trees[tree_offset:], nil, s) - if result != decoderSuccess { - break - } - s.state = stateHuffmanCode2 - } - fallthrough - - case stateHuffmanCode2: - { - var alphabet_size uint32 = numBlockLenSymbols - var tree_offset int = s.loop_counter * huffmanMaxSize26 - result = readHuffmanCode(alphabet_size, alphabet_size, s.block_len_trees[tree_offset:], nil, s) - if result != decoderSuccess { - break - } - s.state = stateHuffmanCode3 - } - fallthrough - - case stateHuffmanCode3: - var tree_offset int = s.loop_counter * huffmanMaxSize26 - if !safeReadBlockLength(s, &s.block_length[s.loop_counter], s.block_len_trees[tree_offset:], br) { - result = decoderNeedsMoreInput - break - } - - s.loop_counter++ - s.state = stateHuffmanCode0 - - case stateMetablockHeader2: - { - var bits uint32 - if !safeReadBits(br, 6, &bits) { - result = decoderNeedsMoreInput - break - } - - s.distance_postfix_bits = bits & bitMask(2) - bits >>= 2 - s.num_direct_distance_codes = numDistanceShortCodes + (bits << s.distance_postfix_bits) - s.distance_postfix_mask = int(bitMask(s.distance_postfix_bits)) - s.context_modes = make([]byte, uint(s.num_block_types[0])) - if s.context_modes == nil { - result = decoderErrorAllocContextModes - break - } - - s.loop_counter = 0 - s.state = stateContextModes - } - fallthrough - - case stateContextModes: - result = readContextModes(s) - - if result != decoderSuccess { - break - } - - s.state = stateContextMap1 - fallthrough - - case stateContextMap1: - result = decodeContextMap(s.num_block_types[0]<= 3 { - prepareLiteralDecoding(s) - s.dist_context_map_slice = s.dist_context_map - s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[0]) - if !ensureRingBuffer(s) { - result = 
decoderErrorAllocRingBuffer2 - break - } - - s.state = stateCommandBegin - } - - case stateCommandBegin, stateCommandInner, stateCommandPostDecodeLiterals, stateCommandPostWrapCopy: - result = processCommands(s) - - if result == decoderNeedsMoreInput { - result = safeProcessCommands(s) - } - - case stateCommandInnerWrite, stateCommandPostWrite1, stateCommandPostWrite2: - result = writeRingBuffer(s, available_out, next_out, nil, false) - - if result != decoderSuccess { - break - } - - wrapRingBuffer(s) - if s.ringbuffer_size == 1<= uint64(block_size) { - return 0 - } - return block_size - uint(delta) -} - -/* Wraps 64-bit input position to 32-bit ring-buffer position preserving - "not-a-first-lap" feature. */ -func wrapPosition(position uint64) uint32 { - var result uint32 = uint32(position) - var gb uint64 = position >> 30 - if gb > 2 { - /* Wrap every 2GiB; The first 3GB are continuous. */ - result = result&((1<<30)-1) | (uint32((gb-1)&1)+1)<<30 - } - - return result -} - -func getBrotliStorage(s *Writer, size uint) []byte { - if s.storage_size_ < size { - s.storage_ = nil - s.storage_ = make([]byte, size) - s.storage_size_ = size - } - - return s.storage_ -} - -func hashTableSize(max_table_size uint, input_size uint) uint { - var htsize uint = 256 - for htsize < max_table_size && htsize < input_size { - htsize <<= 1 - } - - return htsize -} - -func getHashTable(s *Writer, quality int, input_size uint, table_size *uint) []int { - var max_table_size uint = maxHashTableSize(quality) - var htsize uint = hashTableSize(max_table_size, input_size) - /* Use smaller hash table when input.size() is smaller, since we - fill the table, incurring O(hash table size) overhead for - compression, and if the input is short, we won't need that - many hash table entries anyway. */ - - var table []int - assert(max_table_size >= 256) - if quality == fastOnePassCompressionQuality { - /* Only odd shifts are supported by fast-one-pass. */ - if htsize&0xAAAAA == 0 { - htsize <<= 1 - } - } - - if htsize <= uint(len(s.small_table_)) { - table = s.small_table_[:] - } else { - if htsize > s.large_table_size_ { - s.large_table_size_ = htsize - s.large_table_ = nil - s.large_table_ = make([]int, htsize) - } - - table = s.large_table_ - } - - *table_size = htsize - for i := 0; i < int(htsize); i++ { - table[i] = 0 - } - return table -} - -func encodeWindowBits(lgwin int, large_window bool, last_bytes *uint16, last_bytes_bits *byte) { - if large_window { - *last_bytes = uint16((lgwin&0x3F)<<8 | 0x11) - *last_bytes_bits = 14 - } else { - if lgwin == 16 { - *last_bytes = 0 - *last_bytes_bits = 1 - } else if lgwin == 17 { - *last_bytes = 1 - *last_bytes_bits = 7 - } else if lgwin > 17 { - *last_bytes = uint16((lgwin-17)<<1 | 0x01) - *last_bytes_bits = 4 - } else { - *last_bytes = uint16((lgwin-8)<<4 | 0x01) - *last_bytes_bits = 7 - } - } -} - -/* Initializes the command and distance prefix codes for the first block. 
*/ - -var initCommandPrefixCodes_kDefaultCommandDepths = [128]byte{ - 0, - 4, - 4, - 5, - 6, - 6, - 7, - 7, - 7, - 7, - 7, - 8, - 8, - 8, - 8, - 8, - 0, - 0, - 0, - 4, - 4, - 4, - 4, - 4, - 5, - 5, - 6, - 6, - 6, - 6, - 7, - 7, - 7, - 7, - 10, - 10, - 10, - 10, - 10, - 10, - 0, - 4, - 4, - 5, - 5, - 5, - 6, - 6, - 7, - 8, - 8, - 9, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 10, - 5, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6, - 6, - 6, - 6, - 6, - 6, - 5, - 5, - 5, - 5, - 5, - 5, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 5, - 5, - 5, - 5, - 5, - 5, - 6, - 6, - 7, - 7, - 7, - 8, - 10, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, -} -var initCommandPrefixCodes_kDefaultCommandBits = [128]uint16{ - 0, - 0, - 8, - 9, - 3, - 35, - 7, - 71, - 39, - 103, - 23, - 47, - 175, - 111, - 239, - 31, - 0, - 0, - 0, - 4, - 12, - 2, - 10, - 6, - 13, - 29, - 11, - 43, - 27, - 59, - 87, - 55, - 15, - 79, - 319, - 831, - 191, - 703, - 447, - 959, - 0, - 14, - 1, - 25, - 5, - 21, - 19, - 51, - 119, - 159, - 95, - 223, - 479, - 991, - 63, - 575, - 127, - 639, - 383, - 895, - 255, - 767, - 511, - 1023, - 14, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27, - 59, - 7, - 39, - 23, - 55, - 30, - 1, - 17, - 9, - 25, - 5, - 0, - 8, - 4, - 12, - 2, - 10, - 6, - 21, - 13, - 29, - 3, - 19, - 11, - 15, - 47, - 31, - 95, - 63, - 127, - 255, - 767, - 2815, - 1791, - 3839, - 511, - 2559, - 1535, - 3583, - 1023, - 3071, - 2047, - 4095, -} -var initCommandPrefixCodes_kDefaultCommandCode = []byte{ - 0xff, - 0x77, - 0xd5, - 0xbf, - 0xe7, - 0xde, - 0xea, - 0x9e, - 0x51, - 0x5d, - 0xde, - 0xc6, - 0x70, - 0x57, - 0xbc, - 0x58, - 0x58, - 0x58, - 0xd8, - 0xd8, - 0x58, - 0xd5, - 0xcb, - 0x8c, - 0xea, - 0xe0, - 0xc3, - 0x87, - 0x1f, - 0x83, - 0xc1, - 0x60, - 0x1c, - 0x67, - 0xb2, - 0xaa, - 0x06, - 0x83, - 0xc1, - 0x60, - 0x30, - 0x18, - 0xcc, - 0xa1, - 0xce, - 0x88, - 0x54, - 0x94, - 0x46, - 0xe1, - 0xb0, - 0xd0, - 0x4e, - 0xb2, - 0xf7, - 0x04, - 0x00, -} -var initCommandPrefixCodes_kDefaultCommandCodeNumBits uint = 448 - -func initCommandPrefixCodes(cmd_depths []byte, cmd_bits []uint16, cmd_code []byte, cmd_code_numbits *uint) { - copy(cmd_depths, initCommandPrefixCodes_kDefaultCommandDepths[:]) - copy(cmd_bits, initCommandPrefixCodes_kDefaultCommandBits[:]) - - /* Initialize the pre-compressed form of the command and distance prefix - codes. */ - copy(cmd_code, initCommandPrefixCodes_kDefaultCommandCode) - - *cmd_code_numbits = initCommandPrefixCodes_kDefaultCommandCodeNumBits -} - -/* Decide about the context map based on the ability of the prediction - ability of the previous byte UTF8-prefix on the next byte. The - prediction ability is calculated as Shannon entropy. Here we need - Shannon entropy instead of 'BitsEntropy' since the prefix will be - encoded with the remaining 6 bits of the following byte, and - BitsEntropy will assume that symbol to be stored alone using Huffman - coding. 
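(Aside: the comment above motivates measuring prediction quality with Shannon entropy, and chooseContextMap below essentially compares the ideal coded size of the literal histograms with and without context splitting. A standalone sketch of that measure, with hypothetical names and not taken from the vendored sources:)

package main

import (
	"fmt"
	"math"
)

// entropyBits returns the ideal entropy-coded size, in bits, of the data
// summarized by histo: the sum over symbols of -count*log2(count/total).
func entropyBits(histo []uint32) float64 {
	var total float64
	for _, n := range histo {
		total += float64(n)
	}
	if total == 0 {
		return 0
	}
	var bits float64
	for _, n := range histo {
		if n > 0 {
			bits -= float64(n) * math.Log2(float64(n)/total)
		}
	}
	return bits
}

func main() {
	skewed := []uint32{90, 5, 3, 2}  // predictable literals: few bits per symbol
	flat := []uint32{25, 25, 25, 25} // unpredictable literals: 2 bits per symbol
	fmt.Printf("skewed=%.1f bits flat=%.1f bits\n", entropyBits(skewed), entropyBits(flat))
}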
*/ - -var kStaticContextMapContinuation = [64]uint32{ - 1, - 1, - 2, - 2, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -} -var kStaticContextMapSimpleUTF8 = [64]uint32{ - 0, - 0, - 1, - 1, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -} - -func chooseContextMap(quality int, bigram_histo []uint32, num_literal_contexts *uint, literal_context_map *[]uint32) { - var monogram_histo = [3]uint32{0} - var two_prefix_histo = [6]uint32{0} - var total uint - var i uint - var dummy uint - var entropy [4]float64 - for i = 0; i < 9; i++ { - monogram_histo[i%3] += bigram_histo[i] - two_prefix_histo[i%6] += bigram_histo[i] - } - - entropy[1] = shannonEntropy(monogram_histo[:], 3, &dummy) - entropy[2] = (shannonEntropy(two_prefix_histo[:], 3, &dummy) + shannonEntropy(two_prefix_histo[3:], 3, &dummy)) - entropy[3] = 0 - for i = 0; i < 3; i++ { - entropy[3] += shannonEntropy(bigram_histo[3*i:], 3, &dummy) - } - - total = uint(monogram_histo[0] + monogram_histo[1] + monogram_histo[2]) - assert(total != 0) - entropy[0] = 1.0 / float64(total) - entropy[1] *= entropy[0] - entropy[2] *= entropy[0] - entropy[3] *= entropy[0] - - if quality < minQualityForHqContextModeling { - /* 3 context models is a bit slower, don't use it at lower qualities. */ - entropy[3] = entropy[1] * 10 - } - - /* If expected savings by symbol are less than 0.2 bits, skip the - context modeling -- in exchange for faster decoding speed. */ - if entropy[1]-entropy[2] < 0.2 && entropy[1]-entropy[3] < 0.2 { - *num_literal_contexts = 1 - } else if entropy[2]-entropy[3] < 0.02 { - *num_literal_contexts = 2 - *literal_context_map = kStaticContextMapSimpleUTF8[:] - } else { - *num_literal_contexts = 3 - *literal_context_map = kStaticContextMapContinuation[:] - } -} - -/* Decide if we want to use a more complex static context map containing 13 - context values, based on the entropy reduction of histograms over the - first 5 bits of literals. */ - -var kStaticContextMapComplexUTF8 = [64]uint32{ - 11, - 11, - 12, - 12, - 0, - 0, - 0, - 0, - 1, - 1, - 9, - 9, - 2, - 2, - 2, - 2, - 1, - 1, - 1, - 1, - 8, - 3, - 3, - 3, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 8, - 4, - 4, - 4, - 8, - 7, - 4, - 4, - 8, - 0, - 0, - 0, - 3, - 3, - 3, - 3, - 5, - 5, - 10, - 5, - 5, - 5, - 10, - 5, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, -} - -func shouldUseComplexStaticContextMap(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) bool { - /* Try the more complex static context map only for long data. */ - if size_hint < 1<<20 { - return false - } else { - var end_pos uint = start_pos + length - var combined_histo = [32]uint32{0} - var context_histo = [13][32]uint32{[32]uint32{0}} - var total uint32 = 0 - var entropy [3]float64 - var dummy uint - var i uint - var utf8_lut contextLUT = getContextLUT(contextUTF8) - /* To make entropy calculations faster and to fit on the stack, we collect - histograms over the 5 most significant bits of literals. 
One histogram - without context and 13 additional histograms for each context value. */ - for ; start_pos+64 <= end_pos; start_pos += 4096 { - var stride_end_pos uint = start_pos + 64 - var prev2 byte = input[start_pos&mask] - var prev1 byte = input[(start_pos+1)&mask] - var pos uint - - /* To make the analysis of the data faster we only examine 64 byte long - strides at every 4kB intervals. */ - for pos = start_pos + 2; pos < stride_end_pos; pos++ { - var literal byte = input[pos&mask] - var context byte = byte(kStaticContextMapComplexUTF8[getContext(prev1, prev2, utf8_lut)]) - total++ - combined_histo[literal>>3]++ - context_histo[context][literal>>3]++ - prev2 = prev1 - prev1 = literal - } - } - - entropy[1] = shannonEntropy(combined_histo[:], 32, &dummy) - entropy[2] = 0 - for i = 0; i < 13; i++ { - entropy[2] += shannonEntropy(context_histo[i][0:], 32, &dummy) - } - - entropy[0] = 1.0 / float64(total) - entropy[1] *= entropy[0] - entropy[2] *= entropy[0] - - /* The triggering heuristics below were tuned by compressing the individual - files of the silesia corpus. If we skip this kind of context modeling - for not very well compressible input (i.e. entropy using context modeling - is 60% of maximal entropy) or if expected savings by symbol are less - than 0.2 bits, then in every case when it triggers, the final compression - ratio is improved. Note however that this heuristics might be too strict - for some cases and could be tuned further. */ - if entropy[2] > 3.0 || entropy[1]-entropy[2] < 0.2 { - return false - } else { - *num_literal_contexts = 13 - *literal_context_map = kStaticContextMapComplexUTF8[:] - return true - } - } -} - -func decideOverLiteralContextModeling(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) { - if quality < minQualityForContextModeling || length < 64 { - return - } else if shouldUseComplexStaticContextMap(input, start_pos, length, mask, quality, size_hint, num_literal_contexts, literal_context_map) { - } else /* Context map was already set, nothing else to do. */ - { - var end_pos uint = start_pos + length - /* Gather bi-gram data of the UTF8 byte prefixes. To make the analysis of - UTF8 data faster we only examine 64 byte long strides at every 4kB - intervals. */ - - var bigram_prefix_histo = [9]uint32{0} - for ; start_pos+64 <= end_pos; start_pos += 4096 { - var lut = [4]int{0, 0, 1, 2} - var stride_end_pos uint = start_pos + 64 - var prev int = lut[input[start_pos&mask]>>6] * 3 - var pos uint - for pos = start_pos + 1; pos < stride_end_pos; pos++ { - var literal byte = input[pos&mask] - bigram_prefix_histo[prev+lut[literal>>6]]++ - prev = lut[literal>>6] * 3 - } - } - - chooseContextMap(quality, bigram_prefix_histo[0:], num_literal_contexts, literal_context_map) - } -} - -func shouldCompress_encode(data []byte, mask uint, last_flush_pos uint64, bytes uint, num_literals uint, num_commands uint) bool { - /* TODO: find more precise minimal block overhead. 
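The decideOverLiteralContextModeling scan above stays cheap by classifying each byte by its two high bits and only examining a 64-byte stride every 4 KiB. The same sampling loop, restated as a standalone helper over an arbitrary byte slice:

// sampleUTF8BigramHisto builds the 3x3 bigram histogram of UTF-8 prefix
// classes (ASCII, continuation byte, lead byte) from 64-byte strides taken
// every 4096 bytes, mirroring the loop in the deleted code.
func sampleUTF8BigramHisto(input []byte) [9]uint32 {
	var histo [9]uint32
	lut := [4]int{0, 0, 1, 2} // 0x00-0x7F -> 0, 0x80-0xBF -> 1, 0xC0-0xFF -> 2
	for start := 0; start+64 <= len(input); start += 4096 {
		prev := lut[input[start]>>6] * 3
		for pos := start + 1; pos < start+64; pos++ {
			cls := lut[input[pos]>>6]
			histo[prev+cls]++
			prev = cls * 3
		}
	}
	return histo
}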
*/ - if bytes <= 2 { - return false - } - if num_commands < (bytes>>8)+2 { - if float64(num_literals) > 0.99*float64(bytes) { - var literal_histo = [256]uint32{0} - const kSampleRate uint32 = 13 - const kMinEntropy float64 = 7.92 - var bit_cost_threshold float64 = float64(bytes) * kMinEntropy / float64(kSampleRate) - var t uint = uint((uint32(bytes) + kSampleRate - 1) / kSampleRate) - var pos uint32 = uint32(last_flush_pos) - var i uint - for i = 0; i < t; i++ { - literal_histo[data[pos&uint32(mask)]]++ - pos += kSampleRate - } - - if bitsEntropy(literal_histo[:], 256) > bit_cost_threshold { - return false - } - } - } - - return true -} - -/* Chooses the literal context mode for a metablock */ -func chooseContextMode(params *encoderParams, data []byte, pos uint, mask uint, length uint) int { - /* We only do the computation for the option of something else than - CONTEXT_UTF8 for the highest qualities */ - if params.quality >= minQualityForHqBlockSplitting && !isMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio) { - return contextSigned - } - - return contextUTF8 -} - -func writeMetaBlockInternal(data []byte, mask uint, last_flush_pos uint64, bytes uint, is_last bool, literal_context_mode int, params *encoderParams, prev_byte byte, prev_byte2 byte, num_literals uint, num_commands uint, commands []command, saved_dist_cache []int, dist_cache []int, storage_ix *uint, storage []byte) { - var wrapped_last_flush_pos uint32 = wrapPosition(last_flush_pos) - var last_bytes uint16 - var last_bytes_bits byte - var literal_context_lut contextLUT = getContextLUT(literal_context_mode) - var block_params encoderParams = *params - - if bytes == 0 { - /* Write the ISLAST and ISEMPTY bits. */ - writeBits(2, 3, storage_ix, storage) - - *storage_ix = (*storage_ix + 7) &^ 7 - return - } - - if !shouldCompress_encode(data, mask, last_flush_pos, bytes, num_literals, num_commands) { - /* Restore the distance cache, as its last update by - CreateBackwardReferences is now unused. */ - copy(dist_cache, saved_dist_cache[:4]) - - storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, storage_ix, storage) - return - } - - assert(*storage_ix <= 14) - last_bytes = uint16(storage[1])<<8 | uint16(storage[0]) - last_bytes_bits = byte(*storage_ix) - if params.quality <= maxQualityForStaticEntropyCodes { - storeMetaBlockFast(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, num_commands, storage_ix, storage) - } else if params.quality < minQualityForBlockSplit { - storeMetaBlockTrivial(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, num_commands, storage_ix, storage) - } else { - var mb metaBlockSplit - initMetaBlockSplit(&mb) - if params.quality < minQualityForHqBlockSplitting { - var num_literal_contexts uint = 1 - var literal_context_map []uint32 = nil - if !params.disable_literal_context_modeling { - decideOverLiteralContextModeling(data, uint(wrapped_last_flush_pos), bytes, mask, params.quality, params.size_hint, &num_literal_contexts, &literal_context_map) - } - - buildMetaBlockGreedy(data, uint(wrapped_last_flush_pos), mask, prev_byte, prev_byte2, literal_context_lut, num_literal_contexts, literal_context_map, commands, num_commands, &mb) - } else { - buildMetaBlock(data, uint(wrapped_last_flush_pos), mask, &block_params, prev_byte, prev_byte2, commands, num_commands, literal_context_mode, &mb) - } - - if params.quality >= minQualityForOptimizeHistograms { - /* The number of distance symbols effectively used for distance - histograms. 
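shouldCompress_encode above skips entropy coding entirely when a sparse sample of the literals already looks nearly incompressible. A rough, self-contained version of that probe, reusing the every-13th-byte sample rate and the 7.92 bits/byte threshold from the deleted code (the real check also gates on the command count first):

package main

import (
	"fmt"
	"math"
	"math/rand"
)

// looksIncompressible samples every 13th byte and reports whether the sampled
// literal entropy is already close to 8 bits per byte.
func looksIncompressible(data []byte) bool {
	const sampleRate = 13
	const minEntropy = 7.92 // bits/byte; above this, compression is skipped
	var histo [256]uint32
	var n float64
	for i := 0; i < len(data); i += sampleRate {
		histo[data[i]]++
		n++
	}
	if n == 0 {
		return false
	}
	var bits float64
	for _, c := range histo {
		if c == 0 {
			continue
		}
		p := float64(c) / n
		bits -= p * math.Log2(p)
	}
	return bits > minEntropy
}

func main() {
	noise := make([]byte, 64*1024)
	rand.Read(noise)               // random bytes: close to 8 bits/byte
	zeros := make([]byte, 64*1024) // all zero: trivially compressible
	fmt.Println(looksIncompressible(noise), looksIncompressible(zeros)) // almost surely: true false
}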
It might be less than distance alphabet size - for "Large Window Brotli" (32-bit). */ - var num_effective_dist_codes uint32 = block_params.dist.alphabet_size - if num_effective_dist_codes > numHistogramDistanceSymbols { - num_effective_dist_codes = numHistogramDistanceSymbols - } - - optimizeHistograms(num_effective_dist_codes, &mb) - } - - storeMetaBlock(data, uint(wrapped_last_flush_pos), bytes, mask, prev_byte, prev_byte2, is_last, &block_params, literal_context_mode, commands, num_commands, &mb, storage_ix, storage) - destroyMetaBlockSplit(&mb) - } - - if bytes+4 < *storage_ix>>3 { - /* Restore the distance cache and last byte. */ - copy(dist_cache, saved_dist_cache[:4]) - - storage[0] = byte(last_bytes) - storage[1] = byte(last_bytes >> 8) - *storage_ix = uint(last_bytes_bits) - storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, storage_ix, storage) - } -} - -func chooseDistanceParams(params *encoderParams) { - var distance_postfix_bits uint32 = 0 - var num_direct_distance_codes uint32 = 0 - - if params.quality >= minQualityForNonzeroDistanceParams { - var ndirect_msb uint32 - if params.mode == modeFont { - distance_postfix_bits = 1 - num_direct_distance_codes = 12 - } else { - distance_postfix_bits = params.dist.distance_postfix_bits - num_direct_distance_codes = params.dist.num_direct_distance_codes - } - - ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0F - if distance_postfix_bits > maxNpostfix || num_direct_distance_codes > maxNdirect || ndirect_msb<>25)), (last_command.dist_prefix_&0x3FF == 0), &last_command.cmd_prefix_) - } -} - -/* - Processes the accumulated input data and sets |*out_size| to the length of - the new output meta-block, or to zero if no new output meta-block has been - created (in this case the processed input data is buffered internally). - If |*out_size| is positive, |*output| points to the start of the output - data. If |is_last| or |force_flush| is true, an output meta-block is - always created. However, until |is_last| is true encoder may retain up - to 7 bits of the last byte of output. To force encoder to dump the remaining - bits use WriteMetadata() to append an empty meta-data block. - Returns false if the size of the input data is larger than - input_block_size(). -*/ -func encodeData(s *Writer, is_last bool, force_flush bool, out_size *uint, output *[]byte) bool { - var delta uint64 = unprocessedInputSize(s) - var bytes uint32 = uint32(delta) - var wrapped_last_processed_pos uint32 = wrapPosition(s.last_processed_pos_) - var data []byte - var mask uint32 - var literal_context_mode int - - data = s.ringbuffer_.buffer_ - mask = s.ringbuffer_.mask_ - - /* Adding more blocks after "last" block is forbidden. */ - if s.is_last_block_emitted_ { - return false - } - if is_last { - s.is_last_block_emitted_ = true - } - - if delta > uint64(inputBlockSize(s)) { - return false - } - - if s.params.quality == fastTwoPassCompressionQuality && s.command_buf_ == nil { - s.command_buf_ = make([]uint32, kCompressFragmentTwoPassBlockSize) - s.literal_buf_ = make([]byte, kCompressFragmentTwoPassBlockSize) - } - - if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality { - var storage []byte - var storage_ix uint = uint(s.last_bytes_bits_) - var table_size uint - var table []int - - if delta == 0 && !is_last { - /* We have no new input data and we don't have to finish the stream, so - nothing to do. 
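encodeData above addresses the ring buffer as data[pos&mask], so absolute stream positions double as buffer indices once the buffer size is a power of two. A small, ad hoc type showing just that indexing pattern (this is not the deleted ringBuffer implementation):

// ringBuffer keeps the last 1<<lgSize bytes of a stream; because the size is
// a power of two, "position & mask" replaces the modulo operation.
type ringBuffer struct {
	buf  []byte
	mask uint32
	pos  uint64 // total bytes ever written
}

func newRingBuffer(lgSize uint) *ringBuffer {
	size := uint32(1) << lgSize
	return &ringBuffer{buf: make([]byte, size), mask: size - 1}
}

func (r *ringBuffer) write(p []byte) {
	for _, b := range p {
		r.buf[uint32(r.pos)&r.mask] = b
		r.pos++
	}
}

// at returns the byte at an absolute stream position, assuming that position
// is still within the last len(buf) bytes of the stream.
func (r *ringBuffer) at(streamPos uint64) byte {
	return r.buf[uint32(streamPos)&r.mask]
}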
*/ - *out_size = 0 - - return true - } - - storage = getBrotliStorage(s, uint(2*bytes+503)) - storage[0] = byte(s.last_bytes_) - storage[1] = byte(s.last_bytes_ >> 8) - table = getHashTable(s, s.params.quality, uint(bytes), &table_size) - if s.params.quality == fastOnePassCompressionQuality { - compressFragmentFast(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &storage_ix, storage) - } else { - compressFragmentTwoPass(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, s.command_buf_, s.literal_buf_, table, table_size, &storage_ix, storage) - } - - s.last_bytes_ = uint16(storage[storage_ix>>3]) - s.last_bytes_bits_ = byte(storage_ix & 7) - updateLastProcessedPos(s) - *output = storage[0:] - *out_size = storage_ix >> 3 - return true - } - { - /* Theoretical max number of commands is 1 per 2 bytes. */ - var newsize uint = uint(uint32(s.num_commands_) + bytes/2 + 1) - if newsize > s.cmd_alloc_size_ { - var new_commands []command - - /* Reserve a bit more memory to allow merging with a next block - without reallocation: that would impact speed. */ - newsize += uint((bytes / 4) + 16) - - s.cmd_alloc_size_ = newsize - new_commands = make([]command, newsize) - if s.commands_ != nil { - copy(new_commands, s.commands_[:s.num_commands_]) - s.commands_ = nil - } - - s.commands_ = new_commands - } - } - - initOrStitchToPreviousBlock(&s.hasher_, data, uint(mask), &s.params, uint(wrapped_last_processed_pos), uint(bytes), is_last) - - literal_context_mode = chooseContextMode(&s.params, data, uint(wrapPosition(s.last_flush_pos_)), uint(mask), uint(s.input_pos_-s.last_flush_pos_)) - - if s.num_commands_ != 0 && s.last_insert_len_ == 0 { - extendLastCommand(s, &bytes, &wrapped_last_processed_pos) - } - - if s.params.quality == zopflificationQuality { - assert(s.params.hasher.type_ == 10) - createZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_.(*h10), s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_) - } else if s.params.quality == hqZopflificationQuality { - assert(s.params.hasher.type_ == 10) - createHqZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_) - } else { - createBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_) - } - { - var max_length uint = maxMetablockSize(&s.params) - var max_literals uint = max_length / 8 - var max_commands uint = max_length / 8 - var processed_bytes uint = uint(s.input_pos_ - s.last_flush_pos_) - var next_input_fits_metablock bool = (processed_bytes+inputBlockSize(s) <= max_length) - var should_flush bool = (s.params.quality < minQualityForBlockSplit && s.num_literals_+s.num_commands_ >= maxNumDelayedSymbols) - /* If maximal possible additional block doesn't fit metablock, flush now. */ - /* TODO: Postpone decision until next block arrives? */ - - /* If block splitting is not used, then flush as soon as there is some - amount of commands / literals produced. 
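The command-buffer sizing above bounds the worst case at one command per two input bytes and reserves some slack (bytes/4 + 16) so the next block can usually be merged without another allocation. A simplified, capacity-based sketch of that growth policy; the command struct here is only a stand-in:

type command struct{ insertLen, copyLen, distPrefix uint32 } // stand-in, not the deleted struct

// growCommands makes room for the commands an inputBytes-sized block could
// add (at most one per two bytes), over-allocating a little to avoid another
// resize when the following block is merged in.
func growCommands(cmds []command, numUsed, inputBytes uint) []command {
	need := numUsed + inputBytes/2 + 1
	if need <= uint(cap(cmds)) {
		return cmds
	}
	need += inputBytes/4 + 16
	grown := make([]command, numUsed, need)
	copy(grown, cmds[:numUsed])
	return grown
}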
*/ - if !is_last && !force_flush && !should_flush && next_input_fits_metablock && s.num_literals_ < max_literals && s.num_commands_ < max_commands { - /* Merge with next input block. Everything will happen later. */ - if updateLastProcessedPos(s) { - hasherReset(s.hasher_) - } - - *out_size = 0 - return true - } - } - - /* Create the last insert-only command. */ - if s.last_insert_len_ > 0 { - initInsertCommand(&s.commands_[s.num_commands_], s.last_insert_len_) - s.num_commands_++ - s.num_literals_ += s.last_insert_len_ - s.last_insert_len_ = 0 - } - - if !is_last && s.input_pos_ == s.last_flush_pos_ { - /* We have no new input data and we don't have to finish the stream, so - nothing to do. */ - *out_size = 0 - - return true - } - - assert(s.input_pos_ >= s.last_flush_pos_) - assert(s.input_pos_ > s.last_flush_pos_ || is_last) - assert(s.input_pos_-s.last_flush_pos_ <= 1<<24) - { - var metablock_size uint32 = uint32(s.input_pos_ - s.last_flush_pos_) - var storage []byte = getBrotliStorage(s, uint(2*metablock_size+503)) - var storage_ix uint = uint(s.last_bytes_bits_) - storage[0] = byte(s.last_bytes_) - storage[1] = byte(s.last_bytes_ >> 8) - writeMetaBlockInternal(data, uint(mask), s.last_flush_pos_, uint(metablock_size), is_last, literal_context_mode, &s.params, s.prev_byte_, s.prev_byte2_, s.num_literals_, s.num_commands_, s.commands_, s.saved_dist_cache_[:], s.dist_cache_[:], &storage_ix, storage) - s.last_bytes_ = uint16(storage[storage_ix>>3]) - s.last_bytes_bits_ = byte(storage_ix & 7) - s.last_flush_pos_ = s.input_pos_ - if updateLastProcessedPos(s) { - hasherReset(s.hasher_) - } - - if s.last_flush_pos_ > 0 { - s.prev_byte_ = data[(uint32(s.last_flush_pos_)-1)&mask] - } - - if s.last_flush_pos_ > 1 { - s.prev_byte2_ = data[uint32(s.last_flush_pos_-2)&mask] - } - - s.num_commands_ = 0 - s.num_literals_ = 0 - - /* Save the state of the distance cache in case we need to restore it for - emitting an uncompressed block. */ - copy(s.saved_dist_cache_[:], s.dist_cache_[:]) - - *output = storage[0:] - *out_size = storage_ix >> 3 - return true - } -} - -/* Dumps remaining output bits and metadata header to |header|. - Returns number of produced bytes. - REQUIRED: |header| should be 8-byte aligned and at least 16 bytes long. - REQUIRED: |block_size| <= (1 << 24). */ -func writeMetadataHeader(s *Writer, block_size uint, header []byte) uint { - var storage_ix uint - storage_ix = uint(s.last_bytes_bits_) - header[0] = byte(s.last_bytes_) - header[1] = byte(s.last_bytes_ >> 8) - s.last_bytes_ = 0 - s.last_bytes_bits_ = 0 - - writeBits(1, 0, &storage_ix, header) - writeBits(2, 3, &storage_ix, header) - writeBits(1, 0, &storage_ix, header) - if block_size == 0 { - writeBits(2, 0, &storage_ix, header) - } else { - var nbits uint32 - if block_size == 1 { - nbits = 0 - } else { - nbits = log2FloorNonZero(uint(uint32(block_size)-1)) + 1 - } - var nbytes uint32 = (nbits + 7) / 8 - writeBits(2, uint64(nbytes), &storage_ix, header) - writeBits(uint(8*nbytes), uint64(block_size)-1, &storage_ix, header) - } - - return (storage_ix + 7) >> 3 -} - -func injectBytePaddingBlock(s *Writer) { - var seal uint32 = uint32(s.last_bytes_) - var seal_bits uint = uint(s.last_bytes_bits_) - var destination []byte - s.last_bytes_ = 0 - s.last_bytes_bits_ = 0 - - /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */ - seal |= 0x6 << seal_bits - - seal_bits += 6 - - /* If we have already created storage, then append to it. - Storage is valid until next block is being compressed. 
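writeMetadataHeader above encodes block_size-1 in the smallest whole number of bytes and records that byte count in two bits. A sketch of just the length computation, with math/bits.Len32 standing in for log2FloorNonZero (the sizes in main are examples only):

package main

import (
	"fmt"
	"math/bits"
)

// metadataLengthBytes returns how many bytes are needed to store blockSize-1,
// matching the nbits/nbytes computation in the deleted code (blockSize 0 and 1
// both need no length bytes).
func metadataLengthBytes(blockSize uint32) uint32 {
	if blockSize == 0 {
		return 0
	}
	nbits := uint32(bits.Len32(blockSize - 1)) // log2FloorNonZero(blockSize-1)+1, or 0 when blockSize==1
	return (nbits + 7) / 8
}

func main() {
	for _, n := range []uint32{0, 1, 255, 256, 65536, 1 << 24} {
		fmt.Printf("block_size=%d -> %d length byte(s)\n", n, metadataLengthBytes(n))
	}
}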
*/ - if s.next_out_ != nil { - destination = s.next_out_[s.available_out_:] - } else { - destination = s.tiny_buf_.u8[:] - s.next_out_ = destination - } - - destination[0] = byte(seal) - if seal_bits > 8 { - destination[1] = byte(seal >> 8) - } - if seal_bits > 16 { - destination[2] = byte(seal >> 16) - } - s.available_out_ += (seal_bits + 7) >> 3 -} - -func checkFlushComplete(s *Writer) { - if s.stream_state_ == streamFlushRequested && s.available_out_ == 0 { - s.stream_state_ = streamProcessing - s.next_out_ = nil - } -} - -func encoderCompressStreamFast(s *Writer, op int, available_in *uint, next_in *[]byte) bool { - var block_size_limit uint = uint(1) << s.params.lgwin - var buf_size uint = brotli_min_size_t(kCompressFragmentTwoPassBlockSize, brotli_min_size_t(*available_in, block_size_limit)) - var tmp_command_buf []uint32 = nil - var command_buf []uint32 = nil - var tmp_literal_buf []byte = nil - var literal_buf []byte = nil - if s.params.quality != fastOnePassCompressionQuality && s.params.quality != fastTwoPassCompressionQuality { - return false - } - - if s.params.quality == fastTwoPassCompressionQuality { - if s.command_buf_ == nil && buf_size == kCompressFragmentTwoPassBlockSize { - s.command_buf_ = make([]uint32, kCompressFragmentTwoPassBlockSize) - s.literal_buf_ = make([]byte, kCompressFragmentTwoPassBlockSize) - } - - if s.command_buf_ != nil { - command_buf = s.command_buf_ - literal_buf = s.literal_buf_ - } else { - tmp_command_buf = make([]uint32, buf_size) - tmp_literal_buf = make([]byte, buf_size) - command_buf = tmp_command_buf - literal_buf = tmp_literal_buf - } - } - - for { - if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 { - injectBytePaddingBlock(s) - continue - } - - /* Compress block only when internal output buffer is empty, stream is not - finished, there is no pending flush request, and there is either - additional input or pending operation. 
*/ - if s.available_out_ == 0 && s.stream_state_ == streamProcessing && (*available_in != 0 || op != int(operationProcess)) { - var block_size uint = brotli_min_size_t(block_size_limit, *available_in) - var is_last bool = (*available_in == block_size) && (op == int(operationFinish)) - var force_flush bool = (*available_in == block_size) && (op == int(operationFlush)) - var max_out_size uint = 2*block_size + 503 - var storage []byte = nil - var storage_ix uint = uint(s.last_bytes_bits_) - var table_size uint - var table []int - - if force_flush && block_size == 0 { - s.stream_state_ = streamFlushRequested - continue - } - - storage = getBrotliStorage(s, max_out_size) - - storage[0] = byte(s.last_bytes_) - storage[1] = byte(s.last_bytes_ >> 8) - table = getHashTable(s, s.params.quality, block_size, &table_size) - - if s.params.quality == fastOnePassCompressionQuality { - compressFragmentFast(*next_in, block_size, is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &storage_ix, storage) - } else { - compressFragmentTwoPass(*next_in, block_size, is_last, command_buf, literal_buf, table, table_size, &storage_ix, storage) - } - - *next_in = (*next_in)[block_size:] - *available_in -= block_size - var out_bytes uint = storage_ix >> 3 - s.next_out_ = storage - s.available_out_ = out_bytes - - s.last_bytes_ = uint16(storage[storage_ix>>3]) - s.last_bytes_bits_ = byte(storage_ix & 7) - - if force_flush { - s.stream_state_ = streamFlushRequested - } - if is_last { - s.stream_state_ = streamFinished - } - continue - } - - break - } - - tmp_command_buf = nil - tmp_literal_buf = nil - checkFlushComplete(s) - return true -} - -func processMetadata(s *Writer, available_in *uint, next_in *[]byte) bool { - if *available_in > 1<<24 { - return false - } - - /* Switch to metadata block workflow, if required. */ - if s.stream_state_ == streamProcessing { - s.remaining_metadata_bytes_ = uint32(*available_in) - s.stream_state_ = streamMetadataHead - } - - if s.stream_state_ != streamMetadataHead && s.stream_state_ != streamMetadataBody { - return false - } - - for { - if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 { - injectBytePaddingBlock(s) - continue - } - - if s.available_out_ != 0 { - break - } - - if s.input_pos_ != s.last_flush_pos_ { - var result bool = encodeData(s, false, true, &s.available_out_, &s.next_out_) - if !result { - return false - } - continue - } - - if s.stream_state_ == streamMetadataHead { - s.next_out_ = s.tiny_buf_.u8[:] - s.available_out_ = writeMetadataHeader(s, uint(s.remaining_metadata_bytes_), s.next_out_) - s.stream_state_ = streamMetadataBody - continue - } else { - /* Exit workflow only when there is no more input and no more output. - Otherwise client may continue producing empty metadata blocks. */ - if s.remaining_metadata_bytes_ == 0 { - s.remaining_metadata_bytes_ = math.MaxUint32 - s.stream_state_ = streamProcessing - break - } - - /* This guarantees progress in "TakeOutput" workflow. 
*/ - var c uint32 = brotli_min_uint32_t(s.remaining_metadata_bytes_, 16) - s.next_out_ = s.tiny_buf_.u8[:] - copy(s.next_out_, (*next_in)[:c]) - *next_in = (*next_in)[c:] - *available_in -= uint(c) - s.remaining_metadata_bytes_ -= c - s.available_out_ = uint(c) - - continue - } - } - - return true -} - -func updateSizeHint(s *Writer, available_in uint) { - if s.params.size_hint == 0 { - var delta uint64 = unprocessedInputSize(s) - var tail uint64 = uint64(available_in) - var limit uint32 = 1 << 30 - var total uint32 - if (delta >= uint64(limit)) || (tail >= uint64(limit)) || ((delta + tail) >= uint64(limit)) { - total = limit - } else { - total = uint32(delta + tail) - } - - s.params.size_hint = uint(total) - } -} - -func encoderCompressStream(s *Writer, op int, available_in *uint, next_in *[]byte) bool { - if !ensureInitialized(s) { - return false - } - - /* Unfinished metadata block; check requirements. */ - if s.remaining_metadata_bytes_ != math.MaxUint32 { - if uint32(*available_in) != s.remaining_metadata_bytes_ { - return false - } - if op != int(operationEmitMetadata) { - return false - } - } - - if op == int(operationEmitMetadata) { - updateSizeHint(s, 0) /* First data metablock might be emitted here. */ - return processMetadata(s, available_in, next_in) - } - - if s.stream_state_ == streamMetadataHead || s.stream_state_ == streamMetadataBody { - return false - } - - if s.stream_state_ != streamProcessing && *available_in != 0 { - return false - } - - if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality { - return encoderCompressStreamFast(s, op, available_in, next_in) - } - - for { - var remaining_block_size uint = remainingInputBlockSize(s) - - if remaining_block_size != 0 && *available_in != 0 { - var copy_input_size uint = brotli_min_size_t(remaining_block_size, *available_in) - copyInputToRingBuffer(s, copy_input_size, *next_in) - *next_in = (*next_in)[copy_input_size:] - *available_in -= copy_input_size - continue - } - - if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 { - injectBytePaddingBlock(s) - continue - } - - /* Compress data only when internal output buffer is empty, stream is not - finished and there is no pending flush request. 
*/ - if s.available_out_ == 0 && s.stream_state_ == streamProcessing { - if remaining_block_size == 0 || op != int(operationProcess) { - var is_last bool = ((*available_in == 0) && op == int(operationFinish)) - var force_flush bool = ((*available_in == 0) && op == int(operationFlush)) - var result bool - updateSizeHint(s, *available_in) - result = encodeData(s, is_last, force_flush, &s.available_out_, &s.next_out_) - if !result { - return false - } - if force_flush { - s.stream_state_ = streamFlushRequested - } - if is_last { - s.stream_state_ = streamFinished - } - continue - } - } - - break - } - - checkFlushComplete(s) - return true -} - -func encoderHasMoreOutput(s *Writer) bool { - return s.available_out_ != 0 -} - -func encoderTakeOutput(s *Writer) []byte { - if s.available_out_ == 0 { - return nil - } - result := s.next_out_[:s.available_out_] - s.total_out_ += s.available_out_ - s.available_out_ = 0 - checkFlushComplete(s) - - return result -} diff --git a/vendor/github.com/andybalholm/brotli/encoder_dict.go b/vendor/github.com/andybalholm/brotli/encoder_dict.go deleted file mode 100644 index 55c051c6238..00000000000 --- a/vendor/github.com/andybalholm/brotli/encoder_dict.go +++ /dev/null @@ -1,22 +0,0 @@ -package brotli - -/* Dictionary data (words and transforms) for 1 possible context */ -type encoderDictionary struct { - words *dictionary - cutoffTransformsCount uint32 - cutoffTransforms uint64 - hash_table []uint16 - buckets []uint16 - dict_words []dictWord -} - -func initEncoderDictionary(dict *encoderDictionary) { - dict.words = getDictionary() - - dict.hash_table = kStaticDictionaryHash[:] - dict.buckets = kStaticDictionaryBuckets[:] - dict.dict_words = kStaticDictionaryWords[:] - - dict.cutoffTransformsCount = kCutoffTransformsCount - dict.cutoffTransforms = kCutoffTransforms -} diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode.go b/vendor/github.com/andybalholm/brotli/entropy_encode.go deleted file mode 100644 index d0c1dca250a..00000000000 --- a/vendor/github.com/andybalholm/brotli/entropy_encode.go +++ /dev/null @@ -1,593 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Entropy encoding (Huffman) utilities. */ - -/* A node of a Huffman tree. */ -type huffmanTree struct { - total_count_ uint32 - index_left_ int16 - index_right_or_value_ int16 -} - -func initHuffmanTree(self *huffmanTree, count uint32, left int16, right int16) { - self.total_count_ = count - self.index_left_ = left - self.index_right_or_value_ = right -} - -/* Input size optimized Shell sort. */ -type huffmanTreeComparator func(*huffmanTree, *huffmanTree) bool - -var sortHuffmanTreeItems_gaps = []uint{132, 57, 23, 10, 4, 1} - -func sortHuffmanTreeItems(items []huffmanTree, n uint, comparator huffmanTreeComparator) { - if n < 13 { - /* Insertion sort. 
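The streaming loop above (encoderCompressStream, encoderHasMoreOutput, encoderTakeOutput) is normally driven through the package's public io.Writer/io.Reader wrappers rather than called directly. A minimal round-trip sketch, assuming the usual github.com/andybalholm/brotli surface (NewWriterLevel, Writer.Close, NewReader) and the Go 1.15-era ioutil helpers:

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"

	"github.com/andybalholm/brotli"
)

func main() {
	var compressed bytes.Buffer

	w := brotli.NewWriterLevel(&compressed, 5) // quality in the 0..11 range
	if _, err := w.Write([]byte("hello, hello, hello, brotli")); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil { // emits the final ("is last") meta-block
		panic(err)
	}

	compressedLen := compressed.Len()
	out, err := ioutil.ReadAll(brotli.NewReader(&compressed))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d compressed bytes -> %q\n", compressedLen, out)
}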
*/ - var i uint - for i = 1; i < n; i++ { - var tmp huffmanTree = items[i] - var k uint = i - var j uint = i - 1 - for comparator(&tmp, &items[j]) { - items[k] = items[j] - k = j - tmp10 := j - j-- - if tmp10 == 0 { - break - } - } - - items[k] = tmp - } - - return - } else { - var g int - if n < 57 { - g = 2 - } else { - g = 0 - } - for ; g < 6; g++ { - var gap uint = sortHuffmanTreeItems_gaps[g] - var i uint - for i = gap; i < n; i++ { - var j uint = i - var tmp huffmanTree = items[i] - for ; j >= gap && comparator(&tmp, &items[j-gap]); j -= gap { - items[j] = items[j-gap] - } - - items[j] = tmp - } - } - } -} - -/* Returns 1 if assignment of depths succeeded, otherwise 0. */ -func setDepth(p0 int, pool []huffmanTree, depth []byte, max_depth int) bool { - var stack [16]int - var level int = 0 - var p int = p0 - assert(max_depth <= 15) - stack[0] = -1 - for { - if pool[p].index_left_ >= 0 { - level++ - if level > max_depth { - return false - } - stack[level] = int(pool[p].index_right_or_value_) - p = int(pool[p].index_left_) - continue - } else { - depth[pool[p].index_right_or_value_] = byte(level) - } - - for level >= 0 && stack[level] == -1 { - level-- - } - if level < 0 { - return true - } - p = stack[level] - stack[level] = -1 - } -} - -/* Sort the root nodes, least popular first. */ -func sortHuffmanTree(v0 *huffmanTree, v1 *huffmanTree) bool { - if v0.total_count_ != v1.total_count_ { - return v0.total_count_ < v1.total_count_ - } - - return v0.index_right_or_value_ > v1.index_right_or_value_ -} - -/* This function will create a Huffman tree. - - The catch here is that the tree cannot be arbitrarily deep. - Brotli specifies a maximum depth of 15 bits for "code trees" - and 7 bits for "code length code trees." - - count_limit is the value that is to be faked as the minimum value - and this minimum value is raised until the tree matches the - maximum length requirement. - - This algorithm is not of excellent performance for very long data blocks, - especially when population counts are longer than 2**tree_limit, but - we are not planning to use this with extremely long blocks. - - See http://en.wikipedia.org/wiki/Huffman_coding */ -func createHuffmanTree(data []uint32, length uint, tree_limit int, tree []huffmanTree, depth []byte) { - var count_limit uint32 - var sentinel huffmanTree - initHuffmanTree(&sentinel, math.MaxUint32, -1, -1) - - /* For block sizes below 64 kB, we never need to do a second iteration - of this loop. Probably all of our block sizes will be smaller than - that, so this loop is mostly of academic interest. If we actually - would need this, we would be better off with the Katajainen algorithm. */ - for count_limit = 1; ; count_limit *= 2 { - var n uint = 0 - var i uint - var j uint - var k uint - for i = length; i != 0; { - i-- - if data[i] != 0 { - var count uint32 = brotli_max_uint32_t(data[i], count_limit) - initHuffmanTree(&tree[n], count, -1, int16(i)) - n++ - } - } - - if n == 1 { - depth[tree[0].index_right_or_value_] = 1 /* Only one element. */ - break - } - - sortHuffmanTreeItems(tree, n, huffmanTreeComparator(sortHuffmanTree)) - - /* The nodes are: - [0, n): the sorted leaf nodes that we start with. - [n]: we add a sentinel here. - [n + 1, 2n): new parent nodes are added here, starting from - (n+1). These are naturally in ascending order. - [2n]: we add a sentinel at the end as well. - There will be (2n+1) elements at the end. */ - tree[n] = sentinel - - tree[n+1] = sentinel - - i = 0 /* Points to the next leaf node. 
*/ - j = n + 1 /* Points to the next non-leaf node. */ - for k = n - 1; k != 0; k-- { - var left uint - var right uint - if tree[i].total_count_ <= tree[j].total_count_ { - left = i - i++ - } else { - left = j - j++ - } - - if tree[i].total_count_ <= tree[j].total_count_ { - right = i - i++ - } else { - right = j - j++ - } - { - /* The sentinel node becomes the parent node. */ - var j_end uint = 2*n - k - tree[j_end].total_count_ = tree[left].total_count_ + tree[right].total_count_ - tree[j_end].index_left_ = int16(left) - tree[j_end].index_right_or_value_ = int16(right) - - /* Add back the last sentinel node. */ - tree[j_end+1] = sentinel - } - } - - if setDepth(int(2*n-1), tree[0:], depth, tree_limit) { - /* We need to pack the Huffman tree in tree_limit bits. If this was not - successful, add fake entities to the lowest values and retry. */ - break - } - } -} - -func reverse(v []byte, start uint, end uint) { - end-- - for start < end { - var tmp byte = v[start] - v[start] = v[end] - v[end] = tmp - start++ - end-- - } -} - -func writeHuffmanTreeRepetitions(previous_value byte, value byte, repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - assert(repetitions > 0) - if previous_value != value { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions == 7 { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions < 3 { - var i uint - for i = 0; i < repetitions; i++ { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - } - } else { - var start uint = *tree_size - repetitions -= 3 - for { - tree[*tree_size] = repeatPreviousCodeLength - extra_bits_data[*tree_size] = byte(repetitions & 0x3) - (*tree_size)++ - repetitions >>= 2 - if repetitions == 0 { - break - } - - repetitions-- - } - - reverse(tree, start, *tree_size) - reverse(extra_bits_data, start, *tree_size) - } -} - -func writeHuffmanTreeRepetitionsZeros(repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - if repetitions == 11 { - tree[*tree_size] = 0 - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions < 3 { - var i uint - for i = 0; i < repetitions; i++ { - tree[*tree_size] = 0 - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - } - } else { - var start uint = *tree_size - repetitions -= 3 - for { - tree[*tree_size] = repeatZeroCodeLength - extra_bits_data[*tree_size] = byte(repetitions & 0x7) - (*tree_size)++ - repetitions >>= 3 - if repetitions == 0 { - break - } - - repetitions-- - } - - reverse(tree, start, *tree_size) - reverse(extra_bits_data, start, *tree_size) - } -} - -/* Change the population counts in a way that the consequent - Huffman tree compression, especially its RLE-part will be more - likely to compress this data more efficiently. - - length contains the size of the histogram. - counts contains the population counts. - good_for_rle is a buffer of at least length size */ -func optimizeHuffmanCountsForRLE(length uint, counts []uint32, good_for_rle []byte) { - var nonzero_count uint = 0 - var stride uint - var limit uint - var sum uint - var streak_limit uint = 1240 - var i uint - /* Let's make the Huffman code more compatible with RLE encoding. */ - for i = 0; i < length; i++ { - if counts[i] != 0 { - nonzero_count++ - } - } - - if nonzero_count < 16 { - return - } - - for length != 0 && counts[length-1] == 0 { - length-- - } - - if length == 0 { - return /* All zeros. 
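createHuffmanTree above enforces the depth limit (15 bits for code trees) by faking a minimum population count and doubling it until the resulting tree fits. An illustrative, self-contained version of that retry loop, with a naive sort-and-merge tree build standing in for the pooled node construction of the deleted code:

package main

import (
	"fmt"
	"sort"
)

type node struct {
	count       uint32
	symbol      int // -1 for internal nodes
	left, right *node
}

// maxDepth records each leaf's level in depth[symbol] and returns the deepest
// level seen.
func maxDepth(n *node, level int, depth []int) int {
	if n.symbol >= 0 {
		depth[n.symbol] = level
		return level
	}
	l := maxDepth(n.left, level+1, depth)
	r := maxDepth(n.right, level+1, depth)
	if r > l {
		return r
	}
	return l
}

// limitedHuffmanDepths assumes the number of used symbols is at most 1<<limit;
// otherwise no code of that depth exists and the loop cannot terminate.
func limitedHuffmanDepths(counts []uint32, limit int) []int {
	depth := make([]int, len(counts))
	for countLimit := uint32(1); ; countLimit *= 2 {
		var nodes []*node
		for sym, c := range counts {
			if c == 0 {
				continue
			}
			if c < countLimit {
				c = countLimit // fake a larger count to flatten the tree
			}
			nodes = append(nodes, &node{count: c, symbol: sym})
		}
		if len(nodes) == 0 {
			return depth
		}
		if len(nodes) == 1 {
			depth[nodes[0].symbol] = 1
			return depth
		}
		for len(nodes) > 1 {
			sort.Slice(nodes, func(i, j int) bool { return nodes[i].count < nodes[j].count })
			parent := &node{count: nodes[0].count + nodes[1].count, symbol: -1, left: nodes[0], right: nodes[1]}
			nodes = append([]*node{parent}, nodes[2:]...)
		}
		if maxDepth(nodes[0], 0, depth) <= limit {
			return depth
		}
	}
}

func main() {
	counts := []uint32{1000, 500, 20, 10, 5, 2, 1, 1}
	fmt.Println(limitedHuffmanDepths(counts, 4)) // no code longer than 4 bits
}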
*/ - } - - /* Now counts[0..length - 1] does not have trailing zeros. */ - { - var nonzeros uint = 0 - var smallest_nonzero uint32 = 1 << 30 - for i = 0; i < length; i++ { - if counts[i] != 0 { - nonzeros++ - if smallest_nonzero > counts[i] { - smallest_nonzero = counts[i] - } - } - } - - if nonzeros < 5 { - /* Small histogram will model it well. */ - return - } - - if smallest_nonzero < 4 { - var zeros uint = length - nonzeros - if zeros < 6 { - for i = 1; i < length-1; i++ { - if counts[i-1] != 0 && counts[i] == 0 && counts[i+1] != 0 { - counts[i] = 1 - } - } - } - } - - if nonzeros < 28 { - return - } - } - - /* 2) Let's mark all population counts that already can be encoded - with an RLE code. */ - for i := 0; i < int(length); i++ { - good_for_rle[i] = 0 - } - { - var symbol uint32 = counts[0] - /* Let's not spoil any of the existing good RLE codes. - Mark any seq of 0's that is longer as 5 as a good_for_rle. - Mark any seq of non-0's that is longer as 7 as a good_for_rle. */ - - var step uint = 0 - for i = 0; i <= length; i++ { - if i == length || counts[i] != symbol { - if (symbol == 0 && step >= 5) || (symbol != 0 && step >= 7) { - var k uint - for k = 0; k < step; k++ { - good_for_rle[i-k-1] = 1 - } - } - - step = 1 - if i != length { - symbol = counts[i] - } - } else { - step++ - } - } - } - - /* 3) Let's replace those population counts that lead to more RLE codes. - Math here is in 24.8 fixed point representation. */ - stride = 0 - - limit = uint(256*(counts[0]+counts[1]+counts[2])/3 + 420) - sum = 0 - for i = 0; i <= length; i++ { - if i == length || good_for_rle[i] != 0 || (i != 0 && good_for_rle[i-1] != 0) || (256*counts[i]-uint32(limit)+uint32(streak_limit)) >= uint32(2*streak_limit) { - if stride >= 4 || (stride >= 3 && sum == 0) { - var k uint - var count uint = (sum + stride/2) / stride - /* The stride must end, collapse what we have, if we have enough (4). */ - if count == 0 { - count = 1 - } - - if sum == 0 { - /* Don't make an all zeros stride to be upgraded to ones. */ - count = 0 - } - - for k = 0; k < stride; k++ { - /* We don't want to change value at counts[i], - that is already belonging to the next stride. Thus - 1. */ - counts[i-k-1] = uint32(count) - } - } - - stride = 0 - sum = 0 - if i < length-2 { - /* All interesting strides have a count of at least 4, */ - /* at least when non-zeros. */ - limit = uint(256*(counts[i]+counts[i+1]+counts[i+2])/3 + 420) - } else if i < length { - limit = uint(256 * counts[i]) - } else { - limit = 0 - } - } - - stride++ - if i != length { - sum += uint(counts[i]) - if stride >= 4 { - limit = (256*sum + stride/2) / stride - } - - if stride == 4 { - limit += 120 - } - } - } -} - -func decideOverRLEUse(depth []byte, length uint, use_rle_for_non_zero *bool, use_rle_for_zero *bool) { - var total_reps_zero uint = 0 - var total_reps_non_zero uint = 0 - var count_reps_zero uint = 1 - var count_reps_non_zero uint = 1 - var i uint - for i = 0; i < length; { - var value byte = depth[i] - var reps uint = 1 - var k uint - for k = i + 1; k < length && depth[k] == value; k++ { - reps++ - } - - if reps >= 3 && value == 0 { - total_reps_zero += reps - count_reps_zero++ - } - - if reps >= 4 && value != 0 { - total_reps_non_zero += reps - count_reps_non_zero++ - } - - i += reps - } - - *use_rle_for_non_zero = total_reps_non_zero > count_reps_non_zero*2 - *use_rle_for_zero = total_reps_zero > count_reps_zero*2 -} - -/* Write a Huffman tree from bit depths into the bit-stream representation - of a Huffman tree. 
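decideOverRLEUse above is essentially asking whether the qualifying runs are long enough, on average, to pay for the repeat codes. A simplified single-class version of that test (the deleted code tracks zero and non-zero runs separately, with minimum run lengths of 3 and 4 respectively):

// worthUsingRLE reports whether runs of at least minRun equal values cover
// more than twice as many symbols as there are such runs, i.e. whether the
// average qualifying run is longer than two symbols.
func worthUsingRLE(depths []byte, minRun int) bool {
	totalRun, numRuns := 0, 1 // numRuns starts at 1, as in the deleted code
	for i := 0; i < len(depths); {
		run := 1
		for i+run < len(depths) && depths[i+run] == depths[i] {
			run++
		}
		if run >= minRun {
			totalRun += run
			numRuns++
		}
		i += run
	}
	return totalRun > 2*numRuns
}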
The generated Huffman tree is to be compressed once - more using a Huffman tree */ -func writeHuffmanTree(depth []byte, length uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - var previous_value byte = initialRepeatedCodeLength - var i uint - var use_rle_for_non_zero bool = false - var use_rle_for_zero bool = false - var new_length uint = length - /* Throw away trailing zeros. */ - for i = 0; i < length; i++ { - if depth[length-i-1] == 0 { - new_length-- - } else { - break - } - } - - /* First gather statistics on if it is a good idea to do RLE. */ - if length > 50 { - /* Find RLE coding for longer codes. - Shorter codes seem not to benefit from RLE. */ - decideOverRLEUse(depth, new_length, &use_rle_for_non_zero, &use_rle_for_zero) - } - - /* Actual RLE coding. */ - for i = 0; i < new_length; { - var value byte = depth[i] - var reps uint = 1 - if (value != 0 && use_rle_for_non_zero) || (value == 0 && use_rle_for_zero) { - var k uint - for k = i + 1; k < new_length && depth[k] == value; k++ { - reps++ - } - } - - if value == 0 { - writeHuffmanTreeRepetitionsZeros(reps, tree_size, tree, extra_bits_data) - } else { - writeHuffmanTreeRepetitions(previous_value, value, reps, tree_size, tree, extra_bits_data) - previous_value = value - } - - i += reps - } -} - -var reverseBits_kLut = [16]uint{ - 0x00, - 0x08, - 0x04, - 0x0C, - 0x02, - 0x0A, - 0x06, - 0x0E, - 0x01, - 0x09, - 0x05, - 0x0D, - 0x03, - 0x0B, - 0x07, - 0x0F, -} - -func reverseBits(num_bits uint, bits uint16) uint16 { - var retval uint = reverseBits_kLut[bits&0x0F] - var i uint - for i = 4; i < num_bits; i += 4 { - retval <<= 4 - bits = uint16(bits >> 4) - retval |= reverseBits_kLut[bits&0x0F] - } - - retval >>= ((0 - num_bits) & 0x03) - return uint16(retval) -} - -/* 0..15 are values for bits */ -const maxHuffmanBits = 16 - -/* Get the actual bit values for a tree of bit depths. */ -func convertBitDepthsToSymbols(depth []byte, len uint, bits []uint16) { - var bl_count = [maxHuffmanBits]uint16{0} - var next_code [maxHuffmanBits]uint16 - var i uint - /* In Brotli, all bit depths are [1..15] - 0 bit depth means that the symbol does not exist. 
*/ - - var code int = 0 - for i = 0; i < len; i++ { - bl_count[depth[i]]++ - } - - bl_count[0] = 0 - next_code[0] = 0 - for i = 1; i < maxHuffmanBits; i++ { - code = (code + int(bl_count[i-1])) << 1 - next_code[i] = uint16(code) - } - - for i = 0; i < len; i++ { - if depth[i] != 0 { - bits[i] = reverseBits(uint(depth[i]), next_code[depth[i]]) - next_code[depth[i]]++ - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode_static.go b/vendor/github.com/andybalholm/brotli/entropy_encode_static.go deleted file mode 100644 index 5ddf3fcbaef..00000000000 --- a/vendor/github.com/andybalholm/brotli/entropy_encode_static.go +++ /dev/null @@ -1,4394 +0,0 @@ -package brotli - -var kCodeLengthDepth = [18]byte{4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 0, 4, 4} - -var kStaticCommandCodeDepth = [numCommandSymbols]byte{ - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 
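convertBitDepthsToSymbols above is the standard canonical-Huffman code assignment, with each code bit-reversed so it can be emitted low-bit first. A compact stand-alone equivalent using math/bits.Reverse16 (the depths in main are arbitrary):

package main

import (
	"fmt"
	"math/bits"
)

// canonicalCodes turns per-symbol code lengths into canonical Huffman codes,
// stored bit-reversed as in the deleted convertBitDepthsToSymbols.
func canonicalCodes(depths []byte) []uint16 {
	const maxBits = 16
	var blCount [maxBits]uint16
	for _, d := range depths {
		if d != 0 {
			blCount[d]++
		}
	}
	var nextCode [maxBits]uint16
	code := uint16(0)
	for i := 1; i < maxBits; i++ {
		code = (code + blCount[i-1]) << 1
		nextCode[i] = code
	}
	codes := make([]uint16, len(depths))
	for sym, d := range depths {
		if d == 0 {
			continue // unused symbol
		}
		// Reverse the low d bits so the code can be written LSB first.
		codes[sym] = bits.Reverse16(nextCode[d]) >> (16 - d)
		nextCode[d]++
	}
	return codes
}

func main() {
	depths := []byte{2, 1, 3, 3}
	fmt.Printf("%04b\n", canonicalCodes(depths))
}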
11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, -} - -var kStaticDistanceCodeDepth = [64]byte{ - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, -} - -var kCodeLengthBits = [18]uint32{0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 15, 31, 0, 11, 7} - -func storeStaticCodeLengthCode(storage_ix *uint, storage []byte) { - writeBits(40, 0x0000FF55555554, storage_ix, storage) -} - -var kZeroRepsBits = [numCommandSymbols]uint64{ - 0x00000000, - 0x00000000, - 0x00000000, - 0x00000007, - 0x00000017, - 0x00000027, - 0x00000037, - 0x00000047, - 0x00000057, - 0x00000067, - 0x00000077, - 0x00000770, - 0x00000b87, - 0x00001387, - 0x00001b87, - 0x00002387, - 0x00002b87, - 0x00003387, - 0x00003b87, - 0x00000397, - 0x00000b97, - 0x00001397, - 0x00001b97, - 0x00002397, - 0x00002b97, - 0x00003397, - 0x00003b97, - 0x000003a7, - 0x00000ba7, - 0x000013a7, - 0x00001ba7, - 0x000023a7, - 0x00002ba7, - 0x000033a7, - 0x00003ba7, - 0x000003b7, - 0x00000bb7, - 0x000013b7, - 0x00001bb7, - 0x000023b7, - 0x00002bb7, - 0x000033b7, - 0x00003bb7, - 0x000003c7, - 0x00000bc7, - 0x000013c7, - 0x00001bc7, - 0x000023c7, - 0x00002bc7, - 0x000033c7, - 0x00003bc7, - 0x000003d7, - 0x00000bd7, - 0x000013d7, - 0x00001bd7, - 0x000023d7, - 0x00002bd7, - 0x000033d7, - 0x00003bd7, - 0x000003e7, - 0x00000be7, - 0x000013e7, - 0x00001be7, - 0x000023e7, - 0x00002be7, - 0x000033e7, - 0x00003be7, - 0x000003f7, - 0x00000bf7, - 0x000013f7, - 0x00001bf7, - 0x000023f7, - 0x00002bf7, - 0x000033f7, - 0x00003bf7, - 0x0001c387, - 0x0005c387, - 0x0009c387, - 0x000dc387, - 0x0011c387, - 0x0015c387, - 0x0019c387, - 0x001dc387, - 0x0001cb87, - 0x0005cb87, - 0x0009cb87, - 0x000dcb87, - 0x0011cb87, - 0x0015cb87, - 0x0019cb87, - 0x001dcb87, - 0x0001d387, - 0x0005d387, - 0x0009d387, - 0x000dd387, - 0x0011d387, - 0x0015d387, - 0x0019d387, - 0x001dd387, - 0x0001db87, - 0x0005db87, - 0x0009db87, - 0x000ddb87, - 0x0011db87, - 0x0015db87, - 0x0019db87, - 0x001ddb87, - 0x0001e387, - 0x0005e387, - 0x0009e387, - 0x000de387, - 0x0011e387, - 0x0015e387, - 0x0019e387, - 0x001de387, - 0x0001eb87, - 0x0005eb87, - 0x0009eb87, - 0x000deb87, - 0x0011eb87, - 0x0015eb87, - 0x0019eb87, - 0x001deb87, - 0x0001f387, - 0x0005f387, - 0x0009f387, - 0x000df387, - 0x0011f387, - 0x0015f387, - 0x0019f387, - 0x001df387, - 0x0001fb87, - 
0x0005fb87, - 0x0009fb87, - 0x000dfb87, - 0x0011fb87, - 0x0015fb87, - 0x0019fb87, - 0x001dfb87, - 0x0001c397, - 0x0005c397, - 0x0009c397, - 0x000dc397, - 0x0011c397, - 0x0015c397, - 0x0019c397, - 0x001dc397, - 0x0001cb97, - 0x0005cb97, - 0x0009cb97, - 0x000dcb97, - 0x0011cb97, - 0x0015cb97, - 0x0019cb97, - 0x001dcb97, - 0x0001d397, - 0x0005d397, - 0x0009d397, - 0x000dd397, - 0x0011d397, - 0x0015d397, - 0x0019d397, - 0x001dd397, - 0x0001db97, - 0x0005db97, - 0x0009db97, - 0x000ddb97, - 0x0011db97, - 0x0015db97, - 0x0019db97, - 0x001ddb97, - 0x0001e397, - 0x0005e397, - 0x0009e397, - 0x000de397, - 0x0011e397, - 0x0015e397, - 0x0019e397, - 0x001de397, - 0x0001eb97, - 0x0005eb97, - 0x0009eb97, - 0x000deb97, - 0x0011eb97, - 0x0015eb97, - 0x0019eb97, - 0x001deb97, - 0x0001f397, - 0x0005f397, - 0x0009f397, - 0x000df397, - 0x0011f397, - 0x0015f397, - 0x0019f397, - 0x001df397, - 0x0001fb97, - 0x0005fb97, - 0x0009fb97, - 0x000dfb97, - 0x0011fb97, - 0x0015fb97, - 0x0019fb97, - 0x001dfb97, - 0x0001c3a7, - 0x0005c3a7, - 0x0009c3a7, - 0x000dc3a7, - 0x0011c3a7, - 0x0015c3a7, - 0x0019c3a7, - 0x001dc3a7, - 0x0001cba7, - 0x0005cba7, - 0x0009cba7, - 0x000dcba7, - 0x0011cba7, - 0x0015cba7, - 0x0019cba7, - 0x001dcba7, - 0x0001d3a7, - 0x0005d3a7, - 0x0009d3a7, - 0x000dd3a7, - 0x0011d3a7, - 0x0015d3a7, - 0x0019d3a7, - 0x001dd3a7, - 0x0001dba7, - 0x0005dba7, - 0x0009dba7, - 0x000ddba7, - 0x0011dba7, - 0x0015dba7, - 0x0019dba7, - 0x001ddba7, - 0x0001e3a7, - 0x0005e3a7, - 0x0009e3a7, - 0x000de3a7, - 0x0011e3a7, - 0x0015e3a7, - 0x0019e3a7, - 0x001de3a7, - 0x0001eba7, - 0x0005eba7, - 0x0009eba7, - 0x000deba7, - 0x0011eba7, - 0x0015eba7, - 0x0019eba7, - 0x001deba7, - 0x0001f3a7, - 0x0005f3a7, - 0x0009f3a7, - 0x000df3a7, - 0x0011f3a7, - 0x0015f3a7, - 0x0019f3a7, - 0x001df3a7, - 0x0001fba7, - 0x0005fba7, - 0x0009fba7, - 0x000dfba7, - 0x0011fba7, - 0x0015fba7, - 0x0019fba7, - 0x001dfba7, - 0x0001c3b7, - 0x0005c3b7, - 0x0009c3b7, - 0x000dc3b7, - 0x0011c3b7, - 0x0015c3b7, - 0x0019c3b7, - 0x001dc3b7, - 0x0001cbb7, - 0x0005cbb7, - 0x0009cbb7, - 0x000dcbb7, - 0x0011cbb7, - 0x0015cbb7, - 0x0019cbb7, - 0x001dcbb7, - 0x0001d3b7, - 0x0005d3b7, - 0x0009d3b7, - 0x000dd3b7, - 0x0011d3b7, - 0x0015d3b7, - 0x0019d3b7, - 0x001dd3b7, - 0x0001dbb7, - 0x0005dbb7, - 0x0009dbb7, - 0x000ddbb7, - 0x0011dbb7, - 0x0015dbb7, - 0x0019dbb7, - 0x001ddbb7, - 0x0001e3b7, - 0x0005e3b7, - 0x0009e3b7, - 0x000de3b7, - 0x0011e3b7, - 0x0015e3b7, - 0x0019e3b7, - 0x001de3b7, - 0x0001ebb7, - 0x0005ebb7, - 0x0009ebb7, - 0x000debb7, - 0x0011ebb7, - 0x0015ebb7, - 0x0019ebb7, - 0x001debb7, - 0x0001f3b7, - 0x0005f3b7, - 0x0009f3b7, - 0x000df3b7, - 0x0011f3b7, - 0x0015f3b7, - 0x0019f3b7, - 0x001df3b7, - 0x0001fbb7, - 0x0005fbb7, - 0x0009fbb7, - 0x000dfbb7, - 0x0011fbb7, - 0x0015fbb7, - 0x0019fbb7, - 0x001dfbb7, - 0x0001c3c7, - 0x0005c3c7, - 0x0009c3c7, - 0x000dc3c7, - 0x0011c3c7, - 0x0015c3c7, - 0x0019c3c7, - 0x001dc3c7, - 0x0001cbc7, - 0x0005cbc7, - 0x0009cbc7, - 0x000dcbc7, - 0x0011cbc7, - 0x0015cbc7, - 0x0019cbc7, - 0x001dcbc7, - 0x0001d3c7, - 0x0005d3c7, - 0x0009d3c7, - 0x000dd3c7, - 0x0011d3c7, - 0x0015d3c7, - 0x0019d3c7, - 0x001dd3c7, - 0x0001dbc7, - 0x0005dbc7, - 0x0009dbc7, - 0x000ddbc7, - 0x0011dbc7, - 0x0015dbc7, - 0x0019dbc7, - 0x001ddbc7, - 0x0001e3c7, - 0x0005e3c7, - 0x0009e3c7, - 0x000de3c7, - 0x0011e3c7, - 0x0015e3c7, - 0x0019e3c7, - 0x001de3c7, - 0x0001ebc7, - 0x0005ebc7, - 0x0009ebc7, - 0x000debc7, - 0x0011ebc7, - 0x0015ebc7, - 0x0019ebc7, - 0x001debc7, - 0x0001f3c7, - 0x0005f3c7, - 0x0009f3c7, - 0x000df3c7, - 0x0011f3c7, - 0x0015f3c7, - 0x0019f3c7, 
- 0x001df3c7, - 0x0001fbc7, - 0x0005fbc7, - 0x0009fbc7, - 0x000dfbc7, - 0x0011fbc7, - 0x0015fbc7, - 0x0019fbc7, - 0x001dfbc7, - 0x0001c3d7, - 0x0005c3d7, - 0x0009c3d7, - 0x000dc3d7, - 0x0011c3d7, - 0x0015c3d7, - 0x0019c3d7, - 0x001dc3d7, - 0x0001cbd7, - 0x0005cbd7, - 0x0009cbd7, - 0x000dcbd7, - 0x0011cbd7, - 0x0015cbd7, - 0x0019cbd7, - 0x001dcbd7, - 0x0001d3d7, - 0x0005d3d7, - 0x0009d3d7, - 0x000dd3d7, - 0x0011d3d7, - 0x0015d3d7, - 0x0019d3d7, - 0x001dd3d7, - 0x0001dbd7, - 0x0005dbd7, - 0x0009dbd7, - 0x000ddbd7, - 0x0011dbd7, - 0x0015dbd7, - 0x0019dbd7, - 0x001ddbd7, - 0x0001e3d7, - 0x0005e3d7, - 0x0009e3d7, - 0x000de3d7, - 0x0011e3d7, - 0x0015e3d7, - 0x0019e3d7, - 0x001de3d7, - 0x0001ebd7, - 0x0005ebd7, - 0x0009ebd7, - 0x000debd7, - 0x0011ebd7, - 0x0015ebd7, - 0x0019ebd7, - 0x001debd7, - 0x0001f3d7, - 0x0005f3d7, - 0x0009f3d7, - 0x000df3d7, - 0x0011f3d7, - 0x0015f3d7, - 0x0019f3d7, - 0x001df3d7, - 0x0001fbd7, - 0x0005fbd7, - 0x0009fbd7, - 0x000dfbd7, - 0x0011fbd7, - 0x0015fbd7, - 0x0019fbd7, - 0x001dfbd7, - 0x0001c3e7, - 0x0005c3e7, - 0x0009c3e7, - 0x000dc3e7, - 0x0011c3e7, - 0x0015c3e7, - 0x0019c3e7, - 0x001dc3e7, - 0x0001cbe7, - 0x0005cbe7, - 0x0009cbe7, - 0x000dcbe7, - 0x0011cbe7, - 0x0015cbe7, - 0x0019cbe7, - 0x001dcbe7, - 0x0001d3e7, - 0x0005d3e7, - 0x0009d3e7, - 0x000dd3e7, - 0x0011d3e7, - 0x0015d3e7, - 0x0019d3e7, - 0x001dd3e7, - 0x0001dbe7, - 0x0005dbe7, - 0x0009dbe7, - 0x000ddbe7, - 0x0011dbe7, - 0x0015dbe7, - 0x0019dbe7, - 0x001ddbe7, - 0x0001e3e7, - 0x0005e3e7, - 0x0009e3e7, - 0x000de3e7, - 0x0011e3e7, - 0x0015e3e7, - 0x0019e3e7, - 0x001de3e7, - 0x0001ebe7, - 0x0005ebe7, - 0x0009ebe7, - 0x000debe7, - 0x0011ebe7, - 0x0015ebe7, - 0x0019ebe7, - 0x001debe7, - 0x0001f3e7, - 0x0005f3e7, - 0x0009f3e7, - 0x000df3e7, - 0x0011f3e7, - 0x0015f3e7, - 0x0019f3e7, - 0x001df3e7, - 0x0001fbe7, - 0x0005fbe7, - 0x0009fbe7, - 0x000dfbe7, - 0x0011fbe7, - 0x0015fbe7, - 0x0019fbe7, - 0x001dfbe7, - 0x0001c3f7, - 0x0005c3f7, - 0x0009c3f7, - 0x000dc3f7, - 0x0011c3f7, - 0x0015c3f7, - 0x0019c3f7, - 0x001dc3f7, - 0x0001cbf7, - 0x0005cbf7, - 0x0009cbf7, - 0x000dcbf7, - 0x0011cbf7, - 0x0015cbf7, - 0x0019cbf7, - 0x001dcbf7, - 0x0001d3f7, - 0x0005d3f7, - 0x0009d3f7, - 0x000dd3f7, - 0x0011d3f7, - 0x0015d3f7, - 0x0019d3f7, - 0x001dd3f7, - 0x0001dbf7, - 0x0005dbf7, - 0x0009dbf7, - 0x000ddbf7, - 0x0011dbf7, - 0x0015dbf7, - 0x0019dbf7, - 0x001ddbf7, - 0x0001e3f7, - 0x0005e3f7, - 0x0009e3f7, - 0x000de3f7, - 0x0011e3f7, - 0x0015e3f7, - 0x0019e3f7, - 0x001de3f7, - 0x0001ebf7, - 0x0005ebf7, - 0x0009ebf7, - 0x000debf7, - 0x0011ebf7, - 0x0015ebf7, - 0x0019ebf7, - 0x001debf7, - 0x0001f3f7, - 0x0005f3f7, - 0x0009f3f7, - 0x000df3f7, - 0x0011f3f7, - 0x0015f3f7, - 0x0019f3f7, - 0x001df3f7, - 0x0001fbf7, - 0x0005fbf7, - 0x0009fbf7, - 0x000dfbf7, - 0x0011fbf7, - 0x0015fbf7, - 0x0019fbf7, - 0x001dfbf7, - 0x00e1c387, - 0x02e1c387, - 0x04e1c387, - 0x06e1c387, - 0x08e1c387, - 0x0ae1c387, - 0x0ce1c387, - 0x0ee1c387, - 0x00e5c387, - 0x02e5c387, - 0x04e5c387, - 0x06e5c387, - 0x08e5c387, - 0x0ae5c387, - 0x0ce5c387, - 0x0ee5c387, - 0x00e9c387, - 0x02e9c387, - 0x04e9c387, - 0x06e9c387, - 0x08e9c387, - 0x0ae9c387, - 0x0ce9c387, - 0x0ee9c387, - 0x00edc387, - 0x02edc387, - 0x04edc387, - 0x06edc387, - 0x08edc387, - 0x0aedc387, - 0x0cedc387, - 0x0eedc387, - 0x00f1c387, - 0x02f1c387, - 0x04f1c387, - 0x06f1c387, - 0x08f1c387, - 0x0af1c387, - 0x0cf1c387, - 0x0ef1c387, - 0x00f5c387, - 0x02f5c387, - 0x04f5c387, - 0x06f5c387, - 0x08f5c387, - 0x0af5c387, - 0x0cf5c387, - 0x0ef5c387, - 0x00f9c387, - 0x02f9c387, - 0x04f9c387, - 0x06f9c387, - 
0x08f9c387, - 0x0af9c387, - 0x0cf9c387, - 0x0ef9c387, - 0x00fdc387, - 0x02fdc387, - 0x04fdc387, - 0x06fdc387, - 0x08fdc387, - 0x0afdc387, - 0x0cfdc387, - 0x0efdc387, - 0x00e1cb87, - 0x02e1cb87, - 0x04e1cb87, - 0x06e1cb87, - 0x08e1cb87, - 0x0ae1cb87, - 0x0ce1cb87, - 0x0ee1cb87, - 0x00e5cb87, - 0x02e5cb87, - 0x04e5cb87, - 0x06e5cb87, - 0x08e5cb87, - 0x0ae5cb87, - 0x0ce5cb87, - 0x0ee5cb87, - 0x00e9cb87, - 0x02e9cb87, - 0x04e9cb87, - 0x06e9cb87, - 0x08e9cb87, - 0x0ae9cb87, - 0x0ce9cb87, - 0x0ee9cb87, - 0x00edcb87, - 0x02edcb87, - 0x04edcb87, - 0x06edcb87, - 0x08edcb87, - 0x0aedcb87, - 0x0cedcb87, - 0x0eedcb87, - 0x00f1cb87, - 0x02f1cb87, - 0x04f1cb87, - 0x06f1cb87, - 0x08f1cb87, - 0x0af1cb87, - 0x0cf1cb87, - 0x0ef1cb87, - 0x00f5cb87, - 0x02f5cb87, - 0x04f5cb87, - 0x06f5cb87, - 0x08f5cb87, - 0x0af5cb87, - 0x0cf5cb87, - 0x0ef5cb87, - 0x00f9cb87, - 0x02f9cb87, - 0x04f9cb87, - 0x06f9cb87, - 0x08f9cb87, -} - -var kZeroRepsDepth = [numCommandSymbols]uint32{ - 0, - 4, - 8, - 7, - 7, - 7, - 7, - 7, - 7, - 7, - 7, - 11, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, 
- 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, -} - -var kNonZeroRepsBits = [numCommandSymbols]uint64{ - 0x0000000b, - 0x0000001b, - 0x0000002b, - 0x0000003b, - 0x000002cb, - 0x000006cb, - 0x00000acb, - 0x00000ecb, - 0x000002db, - 0x000006db, - 0x00000adb, - 0x00000edb, - 0x000002eb, - 0x000006eb, - 0x00000aeb, - 0x00000eeb, - 0x000002fb, - 0x000006fb, - 0x00000afb, - 0x00000efb, - 0x0000b2cb, - 0x0001b2cb, - 0x0002b2cb, - 0x0003b2cb, - 0x0000b6cb, - 0x0001b6cb, - 0x0002b6cb, - 0x0003b6cb, - 0x0000bacb, - 0x0001bacb, - 0x0002bacb, - 0x0003bacb, - 0x0000becb, - 0x0001becb, - 0x0002becb, - 0x0003becb, - 0x0000b2db, - 0x0001b2db, - 0x0002b2db, - 0x0003b2db, - 0x0000b6db, - 0x0001b6db, - 0x0002b6db, - 0x0003b6db, - 0x0000badb, - 0x0001badb, - 0x0002badb, - 0x0003badb, - 0x0000bedb, - 0x0001bedb, - 0x0002bedb, - 0x0003bedb, - 0x0000b2eb, - 0x0001b2eb, - 0x0002b2eb, - 0x0003b2eb, - 0x0000b6eb, - 0x0001b6eb, - 0x0002b6eb, - 0x0003b6eb, - 0x0000baeb, - 0x0001baeb, - 0x0002baeb, - 0x0003baeb, - 0x0000beeb, - 0x0001beeb, - 0x0002beeb, - 0x0003beeb, - 0x0000b2fb, - 0x0001b2fb, - 0x0002b2fb, - 0x0003b2fb, - 0x0000b6fb, - 0x0001b6fb, - 0x0002b6fb, - 0x0003b6fb, - 0x0000bafb, - 0x0001bafb, - 0x0002bafb, - 0x0003bafb, - 0x0000befb, - 0x0001befb, - 0x0002befb, - 0x0003befb, - 0x002cb2cb, - 0x006cb2cb, - 0x00acb2cb, - 0x00ecb2cb, - 0x002db2cb, - 0x006db2cb, - 0x00adb2cb, - 0x00edb2cb, - 0x002eb2cb, - 0x006eb2cb, - 0x00aeb2cb, - 0x00eeb2cb, - 0x002fb2cb, - 0x006fb2cb, - 0x00afb2cb, - 0x00efb2cb, - 0x002cb6cb, - 0x006cb6cb, - 0x00acb6cb, - 0x00ecb6cb, - 0x002db6cb, - 0x006db6cb, - 0x00adb6cb, - 0x00edb6cb, - 0x002eb6cb, - 0x006eb6cb, - 0x00aeb6cb, - 0x00eeb6cb, - 0x002fb6cb, - 0x006fb6cb, - 0x00afb6cb, - 0x00efb6cb, - 0x002cbacb, - 0x006cbacb, - 0x00acbacb, - 0x00ecbacb, - 0x002dbacb, - 0x006dbacb, - 0x00adbacb, - 0x00edbacb, - 0x002ebacb, - 0x006ebacb, - 0x00aebacb, - 0x00eebacb, - 0x002fbacb, - 0x006fbacb, - 0x00afbacb, - 0x00efbacb, - 0x002cbecb, - 0x006cbecb, - 
0x00acbecb, - 0x00ecbecb, - 0x002dbecb, - 0x006dbecb, - 0x00adbecb, - 0x00edbecb, - 0x002ebecb, - 0x006ebecb, - 0x00aebecb, - 0x00eebecb, - 0x002fbecb, - 0x006fbecb, - 0x00afbecb, - 0x00efbecb, - 0x002cb2db, - 0x006cb2db, - 0x00acb2db, - 0x00ecb2db, - 0x002db2db, - 0x006db2db, - 0x00adb2db, - 0x00edb2db, - 0x002eb2db, - 0x006eb2db, - 0x00aeb2db, - 0x00eeb2db, - 0x002fb2db, - 0x006fb2db, - 0x00afb2db, - 0x00efb2db, - 0x002cb6db, - 0x006cb6db, - 0x00acb6db, - 0x00ecb6db, - 0x002db6db, - 0x006db6db, - 0x00adb6db, - 0x00edb6db, - 0x002eb6db, - 0x006eb6db, - 0x00aeb6db, - 0x00eeb6db, - 0x002fb6db, - 0x006fb6db, - 0x00afb6db, - 0x00efb6db, - 0x002cbadb, - 0x006cbadb, - 0x00acbadb, - 0x00ecbadb, - 0x002dbadb, - 0x006dbadb, - 0x00adbadb, - 0x00edbadb, - 0x002ebadb, - 0x006ebadb, - 0x00aebadb, - 0x00eebadb, - 0x002fbadb, - 0x006fbadb, - 0x00afbadb, - 0x00efbadb, - 0x002cbedb, - 0x006cbedb, - 0x00acbedb, - 0x00ecbedb, - 0x002dbedb, - 0x006dbedb, - 0x00adbedb, - 0x00edbedb, - 0x002ebedb, - 0x006ebedb, - 0x00aebedb, - 0x00eebedb, - 0x002fbedb, - 0x006fbedb, - 0x00afbedb, - 0x00efbedb, - 0x002cb2eb, - 0x006cb2eb, - 0x00acb2eb, - 0x00ecb2eb, - 0x002db2eb, - 0x006db2eb, - 0x00adb2eb, - 0x00edb2eb, - 0x002eb2eb, - 0x006eb2eb, - 0x00aeb2eb, - 0x00eeb2eb, - 0x002fb2eb, - 0x006fb2eb, - 0x00afb2eb, - 0x00efb2eb, - 0x002cb6eb, - 0x006cb6eb, - 0x00acb6eb, - 0x00ecb6eb, - 0x002db6eb, - 0x006db6eb, - 0x00adb6eb, - 0x00edb6eb, - 0x002eb6eb, - 0x006eb6eb, - 0x00aeb6eb, - 0x00eeb6eb, - 0x002fb6eb, - 0x006fb6eb, - 0x00afb6eb, - 0x00efb6eb, - 0x002cbaeb, - 0x006cbaeb, - 0x00acbaeb, - 0x00ecbaeb, - 0x002dbaeb, - 0x006dbaeb, - 0x00adbaeb, - 0x00edbaeb, - 0x002ebaeb, - 0x006ebaeb, - 0x00aebaeb, - 0x00eebaeb, - 0x002fbaeb, - 0x006fbaeb, - 0x00afbaeb, - 0x00efbaeb, - 0x002cbeeb, - 0x006cbeeb, - 0x00acbeeb, - 0x00ecbeeb, - 0x002dbeeb, - 0x006dbeeb, - 0x00adbeeb, - 0x00edbeeb, - 0x002ebeeb, - 0x006ebeeb, - 0x00aebeeb, - 0x00eebeeb, - 0x002fbeeb, - 0x006fbeeb, - 0x00afbeeb, - 0x00efbeeb, - 0x002cb2fb, - 0x006cb2fb, - 0x00acb2fb, - 0x00ecb2fb, - 0x002db2fb, - 0x006db2fb, - 0x00adb2fb, - 0x00edb2fb, - 0x002eb2fb, - 0x006eb2fb, - 0x00aeb2fb, - 0x00eeb2fb, - 0x002fb2fb, - 0x006fb2fb, - 0x00afb2fb, - 0x00efb2fb, - 0x002cb6fb, - 0x006cb6fb, - 0x00acb6fb, - 0x00ecb6fb, - 0x002db6fb, - 0x006db6fb, - 0x00adb6fb, - 0x00edb6fb, - 0x002eb6fb, - 0x006eb6fb, - 0x00aeb6fb, - 0x00eeb6fb, - 0x002fb6fb, - 0x006fb6fb, - 0x00afb6fb, - 0x00efb6fb, - 0x002cbafb, - 0x006cbafb, - 0x00acbafb, - 0x00ecbafb, - 0x002dbafb, - 0x006dbafb, - 0x00adbafb, - 0x00edbafb, - 0x002ebafb, - 0x006ebafb, - 0x00aebafb, - 0x00eebafb, - 0x002fbafb, - 0x006fbafb, - 0x00afbafb, - 0x00efbafb, - 0x002cbefb, - 0x006cbefb, - 0x00acbefb, - 0x00ecbefb, - 0x002dbefb, - 0x006dbefb, - 0x00adbefb, - 0x00edbefb, - 0x002ebefb, - 0x006ebefb, - 0x00aebefb, - 0x00eebefb, - 0x002fbefb, - 0x006fbefb, - 0x00afbefb, - 0x00efbefb, - 0x0b2cb2cb, - 0x1b2cb2cb, - 0x2b2cb2cb, - 0x3b2cb2cb, - 0x0b6cb2cb, - 0x1b6cb2cb, - 0x2b6cb2cb, - 0x3b6cb2cb, - 0x0bacb2cb, - 0x1bacb2cb, - 0x2bacb2cb, - 0x3bacb2cb, - 0x0becb2cb, - 0x1becb2cb, - 0x2becb2cb, - 0x3becb2cb, - 0x0b2db2cb, - 0x1b2db2cb, - 0x2b2db2cb, - 0x3b2db2cb, - 0x0b6db2cb, - 0x1b6db2cb, - 0x2b6db2cb, - 0x3b6db2cb, - 0x0badb2cb, - 0x1badb2cb, - 0x2badb2cb, - 0x3badb2cb, - 0x0bedb2cb, - 0x1bedb2cb, - 0x2bedb2cb, - 0x3bedb2cb, - 0x0b2eb2cb, - 0x1b2eb2cb, - 0x2b2eb2cb, - 0x3b2eb2cb, - 0x0b6eb2cb, - 0x1b6eb2cb, - 0x2b6eb2cb, - 0x3b6eb2cb, - 0x0baeb2cb, - 0x1baeb2cb, - 0x2baeb2cb, - 0x3baeb2cb, - 0x0beeb2cb, - 0x1beeb2cb, - 0x2beeb2cb, - 0x3beeb2cb, 
- 0x0b2fb2cb, - 0x1b2fb2cb, - 0x2b2fb2cb, - 0x3b2fb2cb, - 0x0b6fb2cb, - 0x1b6fb2cb, - 0x2b6fb2cb, - 0x3b6fb2cb, - 0x0bafb2cb, - 0x1bafb2cb, - 0x2bafb2cb, - 0x3bafb2cb, - 0x0befb2cb, - 0x1befb2cb, - 0x2befb2cb, - 0x3befb2cb, - 0x0b2cb6cb, - 0x1b2cb6cb, - 0x2b2cb6cb, - 0x3b2cb6cb, - 0x0b6cb6cb, - 0x1b6cb6cb, - 0x2b6cb6cb, - 0x3b6cb6cb, - 0x0bacb6cb, - 0x1bacb6cb, - 0x2bacb6cb, - 0x3bacb6cb, - 0x0becb6cb, - 0x1becb6cb, - 0x2becb6cb, - 0x3becb6cb, - 0x0b2db6cb, - 0x1b2db6cb, - 0x2b2db6cb, - 0x3b2db6cb, - 0x0b6db6cb, - 0x1b6db6cb, - 0x2b6db6cb, - 0x3b6db6cb, - 0x0badb6cb, - 0x1badb6cb, - 0x2badb6cb, - 0x3badb6cb, - 0x0bedb6cb, - 0x1bedb6cb, - 0x2bedb6cb, - 0x3bedb6cb, - 0x0b2eb6cb, - 0x1b2eb6cb, - 0x2b2eb6cb, - 0x3b2eb6cb, - 0x0b6eb6cb, - 0x1b6eb6cb, - 0x2b6eb6cb, - 0x3b6eb6cb, - 0x0baeb6cb, - 0x1baeb6cb, - 0x2baeb6cb, - 0x3baeb6cb, - 0x0beeb6cb, - 0x1beeb6cb, - 0x2beeb6cb, - 0x3beeb6cb, - 0x0b2fb6cb, - 0x1b2fb6cb, - 0x2b2fb6cb, - 0x3b2fb6cb, - 0x0b6fb6cb, - 0x1b6fb6cb, - 0x2b6fb6cb, - 0x3b6fb6cb, - 0x0bafb6cb, - 0x1bafb6cb, - 0x2bafb6cb, - 0x3bafb6cb, - 0x0befb6cb, - 0x1befb6cb, - 0x2befb6cb, - 0x3befb6cb, - 0x0b2cbacb, - 0x1b2cbacb, - 0x2b2cbacb, - 0x3b2cbacb, - 0x0b6cbacb, - 0x1b6cbacb, - 0x2b6cbacb, - 0x3b6cbacb, - 0x0bacbacb, - 0x1bacbacb, - 0x2bacbacb, - 0x3bacbacb, - 0x0becbacb, - 0x1becbacb, - 0x2becbacb, - 0x3becbacb, - 0x0b2dbacb, - 0x1b2dbacb, - 0x2b2dbacb, - 0x3b2dbacb, - 0x0b6dbacb, - 0x1b6dbacb, - 0x2b6dbacb, - 0x3b6dbacb, - 0x0badbacb, - 0x1badbacb, - 0x2badbacb, - 0x3badbacb, - 0x0bedbacb, - 0x1bedbacb, - 0x2bedbacb, - 0x3bedbacb, - 0x0b2ebacb, - 0x1b2ebacb, - 0x2b2ebacb, - 0x3b2ebacb, - 0x0b6ebacb, - 0x1b6ebacb, - 0x2b6ebacb, - 0x3b6ebacb, - 0x0baebacb, - 0x1baebacb, - 0x2baebacb, - 0x3baebacb, - 0x0beebacb, - 0x1beebacb, - 0x2beebacb, - 0x3beebacb, - 0x0b2fbacb, - 0x1b2fbacb, - 0x2b2fbacb, - 0x3b2fbacb, - 0x0b6fbacb, - 0x1b6fbacb, - 0x2b6fbacb, - 0x3b6fbacb, - 0x0bafbacb, - 0x1bafbacb, - 0x2bafbacb, - 0x3bafbacb, - 0x0befbacb, - 0x1befbacb, - 0x2befbacb, - 0x3befbacb, - 0x0b2cbecb, - 0x1b2cbecb, - 0x2b2cbecb, - 0x3b2cbecb, - 0x0b6cbecb, - 0x1b6cbecb, - 0x2b6cbecb, - 0x3b6cbecb, - 0x0bacbecb, - 0x1bacbecb, - 0x2bacbecb, - 0x3bacbecb, - 0x0becbecb, - 0x1becbecb, - 0x2becbecb, - 0x3becbecb, - 0x0b2dbecb, - 0x1b2dbecb, - 0x2b2dbecb, - 0x3b2dbecb, - 0x0b6dbecb, - 0x1b6dbecb, - 0x2b6dbecb, - 0x3b6dbecb, - 0x0badbecb, - 0x1badbecb, - 0x2badbecb, - 0x3badbecb, - 0x0bedbecb, - 0x1bedbecb, - 0x2bedbecb, - 0x3bedbecb, - 0x0b2ebecb, - 0x1b2ebecb, - 0x2b2ebecb, - 0x3b2ebecb, - 0x0b6ebecb, - 0x1b6ebecb, - 0x2b6ebecb, - 0x3b6ebecb, - 0x0baebecb, - 0x1baebecb, - 0x2baebecb, - 0x3baebecb, - 0x0beebecb, - 0x1beebecb, - 0x2beebecb, - 0x3beebecb, - 0x0b2fbecb, - 0x1b2fbecb, - 0x2b2fbecb, - 0x3b2fbecb, - 0x0b6fbecb, - 0x1b6fbecb, - 0x2b6fbecb, - 0x3b6fbecb, - 0x0bafbecb, - 0x1bafbecb, - 0x2bafbecb, - 0x3bafbecb, - 0x0befbecb, - 0x1befbecb, - 0x2befbecb, - 0x3befbecb, - 0x0b2cb2db, - 0x1b2cb2db, - 0x2b2cb2db, - 0x3b2cb2db, - 0x0b6cb2db, - 0x1b6cb2db, - 0x2b6cb2db, - 0x3b6cb2db, - 0x0bacb2db, - 0x1bacb2db, - 0x2bacb2db, - 0x3bacb2db, - 0x0becb2db, - 0x1becb2db, - 0x2becb2db, - 0x3becb2db, - 0x0b2db2db, - 0x1b2db2db, - 0x2b2db2db, - 0x3b2db2db, - 0x0b6db2db, - 0x1b6db2db, - 0x2b6db2db, - 0x3b6db2db, - 0x0badb2db, - 0x1badb2db, - 0x2badb2db, - 0x3badb2db, - 0x0bedb2db, - 0x1bedb2db, - 0x2bedb2db, - 0x3bedb2db, - 0x0b2eb2db, - 0x1b2eb2db, - 0x2b2eb2db, - 0x3b2eb2db, - 0x0b6eb2db, - 0x1b6eb2db, - 0x2b6eb2db, - 0x3b6eb2db, - 0x0baeb2db, - 0x1baeb2db, - 0x2baeb2db, - 0x3baeb2db, - 0x0beeb2db, - 
0x1beeb2db, - 0x2beeb2db, - 0x3beeb2db, - 0x0b2fb2db, - 0x1b2fb2db, - 0x2b2fb2db, - 0x3b2fb2db, - 0x0b6fb2db, - 0x1b6fb2db, - 0x2b6fb2db, - 0x3b6fb2db, - 0x0bafb2db, - 0x1bafb2db, - 0x2bafb2db, - 0x3bafb2db, - 0x0befb2db, - 0x1befb2db, - 0x2befb2db, - 0x3befb2db, - 0x0b2cb6db, - 0x1b2cb6db, - 0x2b2cb6db, - 0x3b2cb6db, - 0x0b6cb6db, - 0x1b6cb6db, - 0x2b6cb6db, - 0x3b6cb6db, - 0x0bacb6db, - 0x1bacb6db, - 0x2bacb6db, - 0x3bacb6db, - 0x0becb6db, - 0x1becb6db, - 0x2becb6db, - 0x3becb6db, - 0x0b2db6db, - 0x1b2db6db, - 0x2b2db6db, - 0x3b2db6db, - 0x0b6db6db, - 0x1b6db6db, - 0x2b6db6db, - 0x3b6db6db, - 0x0badb6db, - 0x1badb6db, - 0x2badb6db, - 0x3badb6db, - 0x0bedb6db, - 0x1bedb6db, - 0x2bedb6db, - 0x3bedb6db, - 0x0b2eb6db, - 0x1b2eb6db, - 0x2b2eb6db, - 0x3b2eb6db, - 0x0b6eb6db, - 0x1b6eb6db, - 0x2b6eb6db, - 0x3b6eb6db, - 0x0baeb6db, - 0x1baeb6db, - 0x2baeb6db, - 0x3baeb6db, -} - -var kNonZeroRepsDepth = [numCommandSymbols]uint32{ - 6, - 6, - 6, - 6, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, 
- 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, -} - -var kStaticCommandCodeBits = [numCommandSymbols]uint16{ - 0, - 256, - 128, - 384, - 64, - 320, - 192, - 448, - 32, - 288, - 160, - 416, - 96, - 352, - 224, - 480, - 16, - 272, - 144, - 400, - 80, - 336, - 208, - 464, - 48, - 304, - 176, - 432, - 112, - 368, - 240, - 496, - 8, - 264, - 136, - 392, - 72, - 328, - 200, - 456, - 40, - 296, - 168, - 424, - 104, - 360, - 232, - 488, - 24, - 280, - 152, - 408, - 88, - 344, - 216, - 472, - 56, - 312, - 184, - 440, - 120, - 376, - 248, - 504, - 4, - 260, - 132, - 388, - 68, - 324, - 196, - 452, - 36, - 292, - 164, - 420, - 100, - 356, - 228, - 484, - 20, - 276, - 148, - 404, - 84, - 340, - 212, - 468, - 52, - 308, - 180, - 436, - 116, - 372, - 244, - 500, - 12, - 268, - 140, - 396, - 76, - 332, - 204, - 460, - 44, - 300, - 172, - 428, - 108, - 364, - 236, - 492, - 28, - 284, - 156, - 412, - 92, - 348, - 220, - 476, - 60, - 316, - 188, - 444, - 124, - 380, - 252, - 508, - 2, - 258, - 130, - 386, - 66, - 322, - 194, - 450, - 34, - 290, - 162, - 418, - 98, - 354, - 226, - 482, - 18, - 274, - 146, - 402, - 82, - 338, - 210, - 466, - 50, - 306, - 178, - 434, - 114, - 370, - 242, - 498, - 10, - 266, - 138, - 394, - 74, - 330, - 202, - 458, - 42, - 298, - 170, - 426, - 106, - 362, - 234, - 490, - 26, - 282, - 154, - 410, - 90, - 346, - 218, - 474, - 58, - 314, - 186, - 442, - 122, - 378, - 250, - 506, - 6, - 262, - 134, - 390, - 70, - 326, - 198, - 454, - 38, - 294, - 166, - 422, - 102, - 358, - 230, - 486, - 22, - 278, - 150, - 406, - 86, - 342, - 214, - 470, - 54, - 310, - 182, - 438, - 118, - 374, - 246, - 502, - 14, - 270, - 142, - 398, - 78, - 334, - 206, - 462, - 46, - 302, - 174, - 430, - 110, - 366, - 238, - 494, - 30, - 286, - 158, - 414, - 94, - 350, - 222, - 478, - 62, - 318, - 190, - 446, - 126, - 382, - 254, - 510, - 1, - 257, - 129, - 385, - 65, - 321, - 193, - 449, - 33, - 289, - 161, - 417, - 97, - 353, - 225, - 481, - 17, - 273, - 145, - 401, - 81, - 337, - 209, 
- 465, - 49, - 305, - 177, - 433, - 113, - 369, - 241, - 497, - 9, - 265, - 137, - 393, - 73, - 329, - 201, - 457, - 41, - 297, - 169, - 425, - 105, - 361, - 233, - 489, - 25, - 281, - 153, - 409, - 89, - 345, - 217, - 473, - 57, - 313, - 185, - 441, - 121, - 377, - 249, - 505, - 5, - 261, - 133, - 389, - 69, - 325, - 197, - 453, - 37, - 293, - 165, - 421, - 101, - 357, - 229, - 485, - 21, - 277, - 149, - 405, - 85, - 341, - 213, - 469, - 53, - 309, - 181, - 437, - 117, - 373, - 245, - 501, - 13, - 269, - 141, - 397, - 77, - 333, - 205, - 461, - 45, - 301, - 173, - 429, - 109, - 365, - 237, - 493, - 29, - 285, - 157, - 413, - 93, - 349, - 221, - 477, - 61, - 317, - 189, - 445, - 125, - 381, - 253, - 509, - 3, - 259, - 131, - 387, - 67, - 323, - 195, - 451, - 35, - 291, - 163, - 419, - 99, - 355, - 227, - 483, - 19, - 275, - 147, - 403, - 83, - 339, - 211, - 467, - 51, - 307, - 179, - 435, - 115, - 371, - 243, - 499, - 11, - 267, - 139, - 395, - 75, - 331, - 203, - 459, - 43, - 299, - 171, - 427, - 107, - 363, - 235, - 491, - 27, - 283, - 155, - 411, - 91, - 347, - 219, - 475, - 59, - 315, - 187, - 443, - 123, - 379, - 251, - 507, - 7, - 1031, - 519, - 1543, - 263, - 1287, - 775, - 1799, - 135, - 1159, - 647, - 1671, - 391, - 1415, - 903, - 1927, - 71, - 1095, - 583, - 1607, - 327, - 1351, - 839, - 1863, - 199, - 1223, - 711, - 1735, - 455, - 1479, - 967, - 1991, - 39, - 1063, - 551, - 1575, - 295, - 1319, - 807, - 1831, - 167, - 1191, - 679, - 1703, - 423, - 1447, - 935, - 1959, - 103, - 1127, - 615, - 1639, - 359, - 1383, - 871, - 1895, - 231, - 1255, - 743, - 1767, - 487, - 1511, - 999, - 2023, - 23, - 1047, - 535, - 1559, - 279, - 1303, - 791, - 1815, - 151, - 1175, - 663, - 1687, - 407, - 1431, - 919, - 1943, - 87, - 1111, - 599, - 1623, - 343, - 1367, - 855, - 1879, - 215, - 1239, - 727, - 1751, - 471, - 1495, - 983, - 2007, - 55, - 1079, - 567, - 1591, - 311, - 1335, - 823, - 1847, - 183, - 1207, - 695, - 1719, - 439, - 1463, - 951, - 1975, - 119, - 1143, - 631, - 1655, - 375, - 1399, - 887, - 1911, - 247, - 1271, - 759, - 1783, - 503, - 1527, - 1015, - 2039, - 15, - 1039, - 527, - 1551, - 271, - 1295, - 783, - 1807, - 143, - 1167, - 655, - 1679, - 399, - 1423, - 911, - 1935, - 79, - 1103, - 591, - 1615, - 335, - 1359, - 847, - 1871, - 207, - 1231, - 719, - 1743, - 463, - 1487, - 975, - 1999, - 47, - 1071, - 559, - 1583, - 303, - 1327, - 815, - 1839, - 175, - 1199, - 687, - 1711, - 431, - 1455, - 943, - 1967, - 111, - 1135, - 623, - 1647, - 367, - 1391, - 879, - 1903, - 239, - 1263, - 751, - 1775, - 495, - 1519, - 1007, - 2031, - 31, - 1055, - 543, - 1567, - 287, - 1311, - 799, - 1823, - 159, - 1183, - 671, - 1695, - 415, - 1439, - 927, - 1951, - 95, - 1119, - 607, - 1631, - 351, - 1375, - 863, - 1887, - 223, - 1247, - 735, - 1759, - 479, - 1503, - 991, - 2015, - 63, - 1087, - 575, - 1599, - 319, - 1343, - 831, - 1855, - 191, - 1215, - 703, - 1727, - 447, - 1471, - 959, - 1983, - 127, - 1151, - 639, - 1663, - 383, - 1407, - 895, - 1919, - 255, - 1279, - 767, - 1791, - 511, - 1535, - 1023, - 2047, -} - -func storeStaticCommandHuffmanTree(storage_ix *uint, storage []byte) { - writeBits(56, 0x92624416307003, storage_ix, storage) - writeBits(3, 0x00000000, storage_ix, storage) -} - -var kStaticDistanceCodeBits = [64]uint16{ - 0, - 32, - 16, - 48, - 8, - 40, - 24, - 56, - 4, - 36, - 20, - 52, - 12, - 44, - 28, - 60, - 2, - 34, - 18, - 50, - 10, - 42, - 26, - 58, - 6, - 38, - 22, - 54, - 14, - 46, - 30, - 62, - 1, - 33, - 17, - 49, - 9, - 41, - 25, - 57, - 5, - 37, - 21, - 53, - 13, - 45, 
- 29, - 61, - 3, - 35, - 19, - 51, - 11, - 43, - 27, - 59, - 7, - 39, - 23, - 55, - 15, - 47, - 31, - 63, -} - -func storeStaticDistanceHuffmanTree(storage_ix *uint, storage []byte) { - writeBits(28, 0x0369DC03, storage_ix, storage) -} diff --git a/vendor/github.com/andybalholm/brotli/fast_log.go b/vendor/github.com/andybalholm/brotli/fast_log.go deleted file mode 100644 index bbae3009be5..00000000000 --- a/vendor/github.com/andybalholm/brotli/fast_log.go +++ /dev/null @@ -1,296 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Utilities for fast computation of logarithms. */ - -func log2FloorNonZero(n uint) uint32 { - /* TODO: generalize and move to platform.h */ - var result uint32 = 0 - for { - n >>= 1 - if n == 0 { - break - } - result++ - } - return result -} - -/* A lookup table for small values of log2(int) to be used in entropy - computation. - - ", ".join(["%.16ff" % x for x in [0.0]+[log2(x) for x in range(1, 256)]]) */ -var kLog2Table = []float32{ - 0.0000000000000000, - 0.0000000000000000, - 1.0000000000000000, - 1.5849625007211563, - 2.0000000000000000, - 2.3219280948873622, - 2.5849625007211561, - 2.8073549220576042, - 3.0000000000000000, - 3.1699250014423126, - 3.3219280948873626, - 3.4594316186372978, - 3.5849625007211565, - 3.7004397181410922, - 3.8073549220576037, - 3.9068905956085187, - 4.0000000000000000, - 4.0874628412503400, - 4.1699250014423122, - 4.2479275134435852, - 4.3219280948873626, - 4.3923174227787607, - 4.4594316186372973, - 4.5235619560570131, - 4.5849625007211570, - 4.6438561897747244, - 4.7004397181410926, - 4.7548875021634691, - 4.8073549220576037, - 4.8579809951275728, - 4.9068905956085187, - 4.9541963103868758, - 5.0000000000000000, - 5.0443941193584534, - 5.0874628412503400, - 5.1292830169449664, - 5.1699250014423122, - 5.2094533656289501, - 5.2479275134435852, - 5.2854022188622487, - 5.3219280948873626, - 5.3575520046180838, - 5.3923174227787607, - 5.4262647547020979, - 5.4594316186372973, - 5.4918530963296748, - 5.5235619560570131, - 5.5545888516776376, - 5.5849625007211570, - 5.6147098441152083, - 5.6438561897747244, - 5.6724253419714961, - 5.7004397181410926, - 5.7279204545631996, - 5.7548875021634691, - 5.7813597135246599, - 5.8073549220576046, - 5.8328900141647422, - 5.8579809951275719, - 5.8826430493618416, - 5.9068905956085187, - 5.9307373375628867, - 5.9541963103868758, - 5.9772799234999168, - 6.0000000000000000, - 6.0223678130284544, - 6.0443941193584534, - 6.0660891904577721, - 6.0874628412503400, - 6.1085244567781700, - 6.1292830169449672, - 6.1497471195046822, - 6.1699250014423122, - 6.1898245588800176, - 6.2094533656289510, - 6.2288186904958804, - 6.2479275134435861, - 6.2667865406949019, - 6.2854022188622487, - 6.3037807481771031, - 6.3219280948873617, - 6.3398500028846252, - 6.3575520046180847, - 6.3750394313469254, - 6.3923174227787598, - 6.4093909361377026, - 6.4262647547020979, - 6.4429434958487288, - 6.4594316186372982, - 6.4757334309663976, - 6.4918530963296748, - 6.5077946401986964, - 6.5235619560570131, - 6.5391588111080319, - 6.5545888516776376, - 6.5698556083309478, - 6.5849625007211561, - 6.5999128421871278, - 6.6147098441152092, - 6.6293566200796095, - 6.6438561897747253, - 6.6582114827517955, - 6.6724253419714952, - 6.6865005271832185, - 6.7004397181410917, - 6.7142455176661224, - 6.7279204545631988, - 6.7414669864011465, - 6.7548875021634691, - 
6.7681843247769260, - 6.7813597135246599, - 6.7944158663501062, - 6.8073549220576037, - 6.8201789624151887, - 6.8328900141647422, - 6.8454900509443757, - 6.8579809951275719, - 6.8703647195834048, - 6.8826430493618416, - 6.8948177633079437, - 6.9068905956085187, - 6.9188632372745955, - 6.9307373375628867, - 6.9425145053392399, - 6.9541963103868758, - 6.9657842846620879, - 6.9772799234999168, - 6.9886846867721664, - 7.0000000000000000, - 7.0112272554232540, - 7.0223678130284544, - 7.0334230015374501, - 7.0443941193584534, - 7.0552824355011898, - 7.0660891904577721, - 7.0768155970508317, - 7.0874628412503400, - 7.0980320829605272, - 7.1085244567781700, - 7.1189410727235076, - 7.1292830169449664, - 7.1395513523987937, - 7.1497471195046822, - 7.1598713367783891, - 7.1699250014423130, - 7.1799090900149345, - 7.1898245588800176, - 7.1996723448363644, - 7.2094533656289492, - 7.2191685204621621, - 7.2288186904958804, - 7.2384047393250794, - 7.2479275134435861, - 7.2573878426926521, - 7.2667865406949019, - 7.2761244052742384, - 7.2854022188622487, - 7.2946207488916270, - 7.3037807481771031, - 7.3128829552843557, - 7.3219280948873617, - 7.3309168781146177, - 7.3398500028846243, - 7.3487281542310781, - 7.3575520046180847, - 7.3663222142458151, - 7.3750394313469254, - 7.3837042924740528, - 7.3923174227787607, - 7.4008794362821844, - 7.4093909361377026, - 7.4178525148858991, - 7.4262647547020979, - 7.4346282276367255, - 7.4429434958487288, - 7.4512111118323299, - 7.4594316186372973, - 7.4676055500829976, - 7.4757334309663976, - 7.4838157772642564, - 7.4918530963296748, - 7.4998458870832057, - 7.5077946401986964, - 7.5156998382840436, - 7.5235619560570131, - 7.5313814605163119, - 7.5391588111080319, - 7.5468944598876373, - 7.5545888516776376, - 7.5622424242210728, - 7.5698556083309478, - 7.5774288280357487, - 7.5849625007211561, - 7.5924570372680806, - 7.5999128421871278, - 7.6073303137496113, - 7.6147098441152075, - 7.6220518194563764, - 7.6293566200796095, - 7.6366246205436488, - 7.6438561897747244, - 7.6510516911789290, - 7.6582114827517955, - 7.6653359171851765, - 7.6724253419714952, - 7.6794800995054464, - 7.6865005271832185, - 7.6934869574993252, - 7.7004397181410926, - 7.7073591320808825, - 7.7142455176661224, - 7.7210991887071856, - 7.7279204545631996, - 7.7347096202258392, - 7.7414669864011465, - 7.7481928495894596, - 7.7548875021634691, - 7.7615512324444795, - 7.7681843247769260, - 7.7747870596011737, - 7.7813597135246608, - 7.7879025593914317, - 7.7944158663501062, - 7.8008998999203047, - 7.8073549220576037, - 7.8137811912170374, - 7.8201789624151887, - 7.8265484872909159, - 7.8328900141647422, - 7.8392037880969445, - 7.8454900509443757, - 7.8517490414160571, - 7.8579809951275719, - 7.8641861446542798, - 7.8703647195834048, - 7.8765169465650002, - 7.8826430493618425, - 7.8887432488982601, - 7.8948177633079446, - 7.9008668079807496, - 7.9068905956085187, - 7.9128893362299619, - 7.9188632372745955, - 7.9248125036057813, - 7.9307373375628867, - 7.9366379390025719, - 7.9425145053392399, - 7.9483672315846778, - 7.9541963103868758, - 7.9600019320680806, - 7.9657842846620870, - 7.9715435539507720, - 7.9772799234999168, - 7.9829935746943104, - 7.9886846867721664, - 7.9943534368588578, -} - -/* Faster logarithm for small integers, with the property of log2(0) == 0. 
*/ -func fastLog2(v uint) float64 { - if v < uint(len(kLog2Table)) { - return float64(kLog2Table[v]) - } - - return math.Log2(float64(v)) -} diff --git a/vendor/github.com/andybalholm/brotli/find_match_length.go b/vendor/github.com/andybalholm/brotli/find_match_length.go deleted file mode 100644 index 14d350aa59a..00000000000 --- a/vendor/github.com/andybalholm/brotli/find_match_length.go +++ /dev/null @@ -1,16 +0,0 @@ -package brotli - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function to find maximal matching prefixes of strings. */ -func findMatchLengthWithLimit(s1 []byte, s2 []byte, limit uint) uint { - var matched uint = 0 - for matched < limit && s1[matched] == s2[matched] { - matched++ - } - return matched -} diff --git a/vendor/github.com/andybalholm/brotli/go.mod b/vendor/github.com/andybalholm/brotli/go.mod deleted file mode 100644 index 8e609842f33..00000000000 --- a/vendor/github.com/andybalholm/brotli/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/andybalholm/brotli - -go 1.12 diff --git a/vendor/github.com/andybalholm/brotli/go.sum b/vendor/github.com/andybalholm/brotli/go.sum deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/vendor/github.com/andybalholm/brotli/h10.go b/vendor/github.com/andybalholm/brotli/h10.go deleted file mode 100644 index 5662fbbbb52..00000000000 --- a/vendor/github.com/andybalholm/brotli/h10.go +++ /dev/null @@ -1,287 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (*h10) HashTypeLength() uint { - return 4 -} - -func (*h10) StoreLookahead() uint { - return 128 -} - -func hashBytesH10(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return h >> (32 - 17) -} - -/* A (forgetful) hash table where each hash bucket contains a binary tree of - sequences whose first 4 bytes share the same hash code. - Each sequence is 128 long and is identified by its starting - position in the input data. The binary tree is sorted by the lexicographic - order of the sequences, and it is also a max-heap with respect to the - starting positions. */ -type h10 struct { - hasherCommon - window_mask_ uint - buckets_ [1 << 17]uint32 - invalid_pos_ uint32 - forest []uint32 -} - -func (h *h10) Initialize(params *encoderParams) { - h.window_mask_ = (1 << params.lgwin) - 1 - h.invalid_pos_ = uint32(0 - h.window_mask_) - var num_nodes uint = uint(1) << params.lgwin - h.forest = make([]uint32, 2*num_nodes) -} - -func (h *h10) Prepare(one_shot bool, input_size uint, data []byte) { - var invalid_pos uint32 = h.invalid_pos_ - var i uint32 - for i = 0; i < 1<<17; i++ { - h.buckets_[i] = invalid_pos - } -} - -func leftChildIndexH10(self *h10, pos uint) uint { - return 2 * (pos & self.window_mask_) -} - -func rightChildIndexH10(self *h10, pos uint) uint { - return 2*(pos&self.window_mask_) + 1 -} - -/* Stores the hash of the next 4 bytes and in a single tree-traversal, the - hash bucket's binary tree is searched for matches and is re-rooted at the - current position. 
- - If less than 128 data is available, the hash bucket of the - current position is searched for matches, but the state of the hash table - is not changed, since we can not know the final sorting order of the - current (incomplete) sequence. - - This function must be called with increasing cur_ix positions. */ -func storeAndFindMatchesH10(self *h10, data []byte, cur_ix uint, ring_buffer_mask uint, max_length uint, max_backward uint, best_len *uint, matches []backwardMatch) []backwardMatch { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var max_comp_len uint = brotli_min_size_t(max_length, 128) - var should_reroot_tree bool = (max_length >= 128) - var key uint32 = hashBytesH10(data[cur_ix_masked:]) - var forest []uint32 = self.forest - var prev_ix uint = uint(self.buckets_[key]) - var node_left uint = leftChildIndexH10(self, cur_ix) - var node_right uint = rightChildIndexH10(self, cur_ix) - var best_len_left uint = 0 - var best_len_right uint = 0 - var depth_remaining uint - /* The forest index of the rightmost node of the left subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The forest index of the leftmost node of the right subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The match length of the rightmost node of the left subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The match length of the leftmost node of the right subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - if should_reroot_tree { - self.buckets_[key] = uint32(cur_ix) - } - - for depth_remaining = 64; ; depth_remaining-- { - var backward uint = cur_ix - prev_ix - var prev_ix_masked uint = prev_ix & ring_buffer_mask - if backward == 0 || backward > max_backward || depth_remaining == 0 { - if should_reroot_tree { - forest[node_left] = self.invalid_pos_ - forest[node_right] = self.invalid_pos_ - } - - break - } - { - var cur_len uint = brotli_min_size_t(best_len_left, best_len_right) - var len uint - assert(cur_len <= 128) - len = cur_len + findMatchLengthWithLimit(data[cur_ix_masked+cur_len:], data[prev_ix_masked+cur_len:], max_length-cur_len) - if matches != nil && len > *best_len { - *best_len = uint(len) - initBackwardMatch(&matches[0], backward, uint(len)) - matches = matches[1:] - } - - if len >= max_comp_len { - if should_reroot_tree { - forest[node_left] = forest[leftChildIndexH10(self, prev_ix)] - forest[node_right] = forest[rightChildIndexH10(self, prev_ix)] - } - - break - } - - if data[cur_ix_masked+len] > data[prev_ix_masked+len] { - best_len_left = uint(len) - if should_reroot_tree { - forest[node_left] = uint32(prev_ix) - } - - node_left = rightChildIndexH10(self, prev_ix) - prev_ix = uint(forest[node_left]) - } else { - best_len_right = uint(len) - if should_reroot_tree { - forest[node_right] = uint32(prev_ix) - } - - node_right = leftChildIndexH10(self, prev_ix) - prev_ix = uint(forest[node_right]) - } - } - } - - return matches -} - -/* Finds all backward matches of &data[cur_ix & ring_buffer_mask] up to the - length of max_length and stores the position cur_ix in the hash table. - - Sets *num_matches to the number of matches found, and stores the found - matches in matches[0] to matches[*num_matches - 1]. The matches will be - sorted by strictly increasing length and (non-strictly) increasing - distance. 
*/ -func findAllMatchesH10(handle *h10, dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, cur_ix uint, max_length uint, max_backward uint, gap uint, params *encoderParams, matches []backwardMatch) uint { - var orig_matches []backwardMatch = matches - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var best_len uint = 1 - var short_match_max_backward uint - if params.quality != hqZopflificationQuality { - short_match_max_backward = 16 - } else { - short_match_max_backward = 64 - } - var stop uint = cur_ix - short_match_max_backward - var dict_matches [maxStaticDictionaryMatchLen + 1]uint32 - var i uint - if cur_ix < short_match_max_backward { - stop = 0 - } - for i = cur_ix - 1; i > stop && best_len <= 2; i-- { - var prev_ix uint = i - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if data[cur_ix_masked] != data[prev_ix] || data[cur_ix_masked+1] != data[prev_ix+1] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len > best_len { - best_len = uint(len) - initBackwardMatch(&matches[0], backward, uint(len)) - matches = matches[1:] - } - } - } - - if best_len < max_length { - matches = storeAndFindMatchesH10(handle, data, cur_ix, ring_buffer_mask, max_length, max_backward, &best_len, matches) - } - - for i = 0; i <= maxStaticDictionaryMatchLen; i++ { - dict_matches[i] = kInvalidMatch - } - { - var minlen uint = brotli_max_size_t(4, best_len+1) - if findAllStaticDictionaryMatches(dictionary, data[cur_ix_masked:], minlen, max_length, dict_matches[0:]) { - var maxlen uint = brotli_min_size_t(maxStaticDictionaryMatchLen, max_length) - var l uint - for l = minlen; l <= maxlen; l++ { - var dict_id uint32 = dict_matches[l] - if dict_id < kInvalidMatch { - var distance uint = max_backward + gap + uint(dict_id>>5) + 1 - if distance <= params.dist.max_distance { - initDictionaryBackwardMatch(&matches[0], distance, l, uint(dict_id&31)) - matches = matches[1:] - } - } - } - } - } - - return uint(-cap(matches) + cap(orig_matches)) -} - -/* Stores the hash of the next 4 bytes and re-roots the binary tree at the - current sequence, without returning any matches. - REQUIRES: ix + 128 <= end-of-current-block */ -func (h *h10) Store(data []byte, mask uint, ix uint) { - var max_backward uint = h.window_mask_ - windowGap + 1 - /* Maximum distance is window size - 16, see section 9.1. of the spec. */ - storeAndFindMatchesH10(h, data, ix, mask, 128, max_backward, nil, nil) -} - -func (h *h10) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint = ix_start - var j uint = ix_start - if ix_start+63 <= ix_end { - i = ix_end - 63 - } - - if ix_start+512 <= i { - for ; j < i; j += 8 { - h.Store(data, mask, j) - } - } - - for ; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *h10) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 128 { - var i_start uint = position - 128 + 1 - var i_end uint = brotli_min_size_t(position, i_start+num_bytes) - /* Store the last `128 - 1` positions in the hasher. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - - var i uint - for i = i_start; i < i_end; i++ { - /* Maximum distance is window size - 16, see section 9.1. of the spec. 
- Furthermore, we have to make sure that we don't look further back - from the start of the next block than the window size, otherwise we - could access already overwritten areas of the ring-buffer. */ - var max_backward uint = h.window_mask_ - brotli_max_size_t(windowGap-1, position-i) - - /* We know that i + 128 <= position + num_bytes, i.e. the - end of the current block and that we have at least - 128 tail in the ring-buffer. */ - storeAndFindMatchesH10(h, ringbuffer, i, ringbuffer_mask, 128, max_backward, nil, nil) - } - } -} - -/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */ -const maxNumMatchesH10 = 128 - -func (*h10) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - panic("unimplemented") -} - -func (*h10) PrepareDistanceCache(distance_cache []int) { - panic("unimplemented") -} diff --git a/vendor/github.com/andybalholm/brotli/h5.go b/vendor/github.com/andybalholm/brotli/h5.go deleted file mode 100644 index f391b73fdd7..00000000000 --- a/vendor/github.com/andybalholm/brotli/h5.go +++ /dev/null @@ -1,214 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (bucket_size_) to a ring buffer of - fixed size (block_size_). The ring buffer contains the last block_size_ - index positions of the given hash key in the compressed data. */ -func (*h5) HashTypeLength() uint { - return 4 -} - -func (*h5) StoreLookahead() uint { - return 4 -} - -/* HashBytes is the function that chooses the bucket to place the address in. */ -func hashBytesH5(data []byte, shift int) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return uint32(h >> uint(shift)) -} - -type h5 struct { - hasherCommon - bucket_size_ uint - block_size_ uint - hash_shift_ int - block_mask_ uint32 - num []uint16 - buckets []uint32 -} - -func (h *h5) Initialize(params *encoderParams) { - h.hash_shift_ = 32 - h.params.bucket_bits - h.bucket_size_ = uint(1) << uint(h.params.bucket_bits) - h.block_size_ = uint(1) << uint(h.params.block_bits) - h.block_mask_ = uint32(h.block_size_ - 1) - h.num = make([]uint16, h.bucket_size_) - h.buckets = make([]uint32, h.block_size_*h.bucket_size_) -} - -func (h *h5) Prepare(one_shot bool, input_size uint, data []byte) { - var num []uint16 = h.num - var partial_prepare_threshold uint = h.bucket_size_ >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = hashBytesH5(data[i:], h.hash_shift_) - num[key] = 0 - } - } else { - for i := 0; i < int(h.bucket_size_); i++ { - num[i] = 0 - } - } -} - -/* Look at 4 bytes at &data[ix & mask]. - Compute a hash from these, and store the value of ix at that position. 
*/ -func (h *h5) Store(data []byte, mask uint, ix uint) { - var num []uint16 = h.num - var key uint32 = hashBytesH5(data[ix&mask:], h.hash_shift_) - var minor_ix uint = uint(num[key]) & uint(h.block_mask_) - var offset uint = minor_ix + uint(key<= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (h *h5) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCacheH5 must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCacheH5 once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. */ -func (h *h5) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var num []uint16 = h.num - var buckets []uint32 = h.buckets - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var i uint - var bucket []uint32 - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. */ - for i = 0; i < uint(h.params.num_last_distances_to_check); i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = uint(cur_ix - backward) - if prev_ix >= cur_ix { - continue - } - - if backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 3 || (len == 2 && i < 2) { - /* Comparing for >= 2 does not change the semantics, but just saves for - a few unnecessary binary logarithms in backward reference score, - since we are not interested in such short matches. 
*/ - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(i) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var key uint32 = hashBytesH5(data[cur_ix_masked:], h.hash_shift_) - bucket = buckets[key< h.block_size_ { - down = uint(num[key]) - h.block_size_ - } else { - down = 0 - } - for i = uint(num[key]); i > down; { - var prev_ix uint - i-- - prev_ix = uint(bucket[uint32(i)&h.block_mask_]) - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. */ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix) - num[key]++ - } - - if min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/h6.go b/vendor/github.com/andybalholm/brotli/h6.go deleted file mode 100644 index 80bb224aa87..00000000000 --- a/vendor/github.com/andybalholm/brotli/h6.go +++ /dev/null @@ -1,216 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (bucket_size_) to a ring buffer of - fixed size (block_size_). The ring buffer contains the last block_size_ - index positions of the given hash key in the compressed data. */ -func (*h6) HashTypeLength() uint { - return 8 -} - -func (*h6) StoreLookahead() uint { - return 8 -} - -/* HashBytes is the function that chooses the bucket to place the address in. */ -func hashBytesH6(data []byte, mask uint64, shift int) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(data) & mask) * kHashMul64Long - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. 
*/ - return uint32(h >> uint(shift)) -} - -type h6 struct { - hasherCommon - bucket_size_ uint - block_size_ uint - hash_shift_ int - hash_mask_ uint64 - block_mask_ uint32 - num []uint16 - buckets []uint32 -} - -func (h *h6) Initialize(params *encoderParams) { - h.hash_shift_ = 64 - h.params.bucket_bits - h.hash_mask_ = (^(uint64(0))) >> uint(64-8*h.params.hash_len) - h.bucket_size_ = uint(1) << uint(h.params.bucket_bits) - h.block_size_ = uint(1) << uint(h.params.block_bits) - h.block_mask_ = uint32(h.block_size_ - 1) - h.num = make([]uint16, h.bucket_size_) - h.buckets = make([]uint32, h.block_size_*h.bucket_size_) -} - -func (h *h6) Prepare(one_shot bool, input_size uint, data []byte) { - var num []uint16 = h.num - var partial_prepare_threshold uint = h.bucket_size_ >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = hashBytesH6(data[i:], h.hash_mask_, h.hash_shift_) - num[key] = 0 - } - } else { - for i := 0; i < int(h.bucket_size_); i++ { - num[i] = 0 - } - } -} - -/* Look at 4 bytes at &data[ix & mask]. - Compute a hash from these, and store the value of ix at that position. */ -func (h *h6) Store(data []byte, mask uint, ix uint) { - var num []uint16 = h.num - var key uint32 = hashBytesH6(data[ix&mask:], h.hash_mask_, h.hash_shift_) - var minor_ix uint = uint(num[key]) & uint(h.block_mask_) - var offset uint = minor_ix + uint(key<= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (h *h6) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCacheH6 must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCacheH6 once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. */ -func (h *h6) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var num []uint16 = h.num - var buckets []uint32 = h.buckets - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var i uint - var bucket []uint32 - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. 
*/ - for i = 0; i < uint(h.params.num_last_distances_to_check); i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = uint(cur_ix - backward) - if prev_ix >= cur_ix { - continue - } - - if backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 3 || (len == 2 && i < 2) { - /* Comparing for >= 2 does not change the semantics, but just saves for - a few unnecessary binary logarithms in backward reference score, - since we are not interested in such short matches. */ - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(i) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var key uint32 = hashBytesH6(data[cur_ix_masked:], h.hash_mask_, h.hash_shift_) - bucket = buckets[key< h.block_size_ { - down = uint(num[key]) - h.block_size_ - } else { - down = 0 - } - for i = uint(num[key]); i > down; { - var prev_ix uint - i-- - prev_ix = uint(bucket[uint32(i)&h.block_mask_]) - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. 
*/ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix) - num[key]++ - } - - if min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/hash.go b/vendor/github.com/andybalholm/brotli/hash.go deleted file mode 100644 index 003b433ea62..00000000000 --- a/vendor/github.com/andybalholm/brotli/hash.go +++ /dev/null @@ -1,344 +0,0 @@ -package brotli - -import ( - "encoding/binary" - "fmt" -) - -type hasherCommon struct { - params hasherParams - is_prepared_ bool - dict_num_lookups uint - dict_num_matches uint -} - -func (h *hasherCommon) Common() *hasherCommon { - return h -} - -type hasherHandle interface { - Common() *hasherCommon - Initialize(params *encoderParams) - Prepare(one_shot bool, input_size uint, data []byte) - StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) - HashTypeLength() uint - StoreLookahead() uint - PrepareDistanceCache(distance_cache []int) - FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) - StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) - Store(data []byte, mask uint, ix uint) -} - -type score_t uint - -const kCutoffTransformsCount uint32 = 10 - -/* 0, 12, 27, 23, 42, 63, 56, 48, 59, 64 */ -/* 0+0, 4+8, 8+19, 12+11, 16+26, 20+43, 24+32, 28+20, 32+27, 36+28 */ -const kCutoffTransforms uint64 = 0x071B520ADA2D3200 - -type hasherSearchResult struct { - len uint - distance uint - score uint - len_code_delta int -} - -/* kHashMul32 multiplier has these properties: - * The multiplier must be odd. Otherwise we may lose the highest bit. - * No long streaks of ones or zeros. - * There is no effort to ensure that it is a prime, the oddity is enough - for this use. - * The number has been tuned heuristically against compression benchmarks. */ -const kHashMul32 uint32 = 0x1E35A7BD - -const kHashMul64 uint64 = 0x1E35A7BD1E35A7BD - -const kHashMul64Long uint64 = 0x1FE35A7BD3579BD3 - -func hash14(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return h >> (32 - 14) -} - -func prepareDistanceCache(distance_cache []int, num_distances int) { - if num_distances > 4 { - var last_distance int = distance_cache[0] - distance_cache[4] = last_distance - 1 - distance_cache[5] = last_distance + 1 - distance_cache[6] = last_distance - 2 - distance_cache[7] = last_distance + 2 - distance_cache[8] = last_distance - 3 - distance_cache[9] = last_distance + 3 - if num_distances > 10 { - var next_last_distance int = distance_cache[1] - distance_cache[10] = next_last_distance - 1 - distance_cache[11] = next_last_distance + 1 - distance_cache[12] = next_last_distance - 2 - distance_cache[13] = next_last_distance + 2 - distance_cache[14] = next_last_distance - 3 - distance_cache[15] = next_last_distance + 3 - } - } -} - -const literalByteScore = 135 - -const distanceBitPenalty = 30 - -/* Score must be positive after applying maximal penalty. 
*/ -const scoreBase = (distanceBitPenalty * 8 * 8) - -/* Usually, we always choose the longest backward reference. This function - allows for the exception of that rule. - - If we choose a backward reference that is further away, it will - usually be coded with more bits. We approximate this by assuming - log2(distance). If the distance can be expressed in terms of the - last four distances, we use some heuristic constants to estimate - the bits cost. For the first up to four literals we use the bit - cost of the literals from the literal cost model, after that we - use the average bit cost of the cost model. - - This function is used to sometimes discard a longer backward reference - when it is not much longer and the bit cost for encoding it is more - than the saved literals. - - backward_reference_offset MUST be positive. */ -func backwardReferenceScore(copy_length uint, backward_reference_offset uint) uint { - return scoreBase + literalByteScore*uint(copy_length) - distanceBitPenalty*uint(log2FloorNonZero(backward_reference_offset)) -} - -func backwardReferenceScoreUsingLastDistance(copy_length uint) uint { - return literalByteScore*uint(copy_length) + scoreBase + 15 -} - -func backwardReferencePenaltyUsingLastDistance(distance_short_code uint) uint { - return uint(39) + ((0x1CA10 >> (distance_short_code & 0xE)) & 0xE) -} - -func testStaticDictionaryItem(dictionary *encoderDictionary, item uint, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult) bool { - var len uint - var word_idx uint - var offset uint - var matchlen uint - var backward uint - var score uint - len = item & 0x1F - word_idx = item >> 5 - offset = uint(dictionary.words.offsets_by_length[len]) + len*word_idx - if len > max_length { - return false - } - - matchlen = findMatchLengthWithLimit(data, dictionary.words.data[offset:], uint(len)) - if matchlen+uint(dictionary.cutoffTransformsCount) <= len || matchlen == 0 { - return false - } - { - var cut uint = len - matchlen - var transform_id uint = (cut << 2) + uint((dictionary.cutoffTransforms>>(cut*6))&0x3F) - backward = max_backward + 1 + word_idx + (transform_id << dictionary.words.size_bits_by_length[len]) - } - - if backward > max_distance { - return false - } - - score = backwardReferenceScore(matchlen, backward) - if score < out.score { - return false - } - - out.len = matchlen - out.len_code_delta = int(len) - int(matchlen) - out.distance = backward - out.score = score - return true -} - -func searchInStaticDictionary(dictionary *encoderDictionary, handle hasherHandle, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult, shallow bool) { - var key uint - var i uint - var self *hasherCommon = handle.Common() - if self.dict_num_matches < self.dict_num_lookups>>7 { - return - } - - key = uint(hash14(data) << 1) - for i = 0; ; (func() { i++; key++ })() { - var tmp uint - if shallow { - tmp = 1 - } else { - tmp = 2 - } - if i >= tmp { - break - } - var item uint = uint(dictionary.hash_table[key]) - self.dict_num_lookups++ - if item != 0 { - var item_matches bool = testStaticDictionaryItem(dictionary, item, data, max_length, max_backward, max_distance, out) - if item_matches { - self.dict_num_matches++ - } - } - } -} - -type backwardMatch struct { - distance uint32 - length_and_code uint32 -} - -func initBackwardMatch(self *backwardMatch, dist uint, len uint) { - self.distance = uint32(dist) - self.length_and_code = uint32(len << 5) -} - -func initDictionaryBackwardMatch(self *backwardMatch, 
dist uint, len uint, len_code uint) { - self.distance = uint32(dist) - var tmp uint - if len == len_code { - tmp = 0 - } else { - tmp = len_code - } - self.length_and_code = uint32(len<<5 | tmp) -} - -func backwardMatchLength(self *backwardMatch) uint { - return uint(self.length_and_code >> 5) -} - -func backwardMatchLengthCode(self *backwardMatch) uint { - var code uint = uint(self.length_and_code) & 31 - if code != 0 { - return code - } else { - return backwardMatchLength(self) - } -} - -func hasherReset(handle hasherHandle) { - if handle == nil { - return - } - handle.Common().is_prepared_ = false -} - -func newHasher(typ int) hasherHandle { - switch typ { - case 2: - return &hashLongestMatchQuickly{ - bucketBits: 16, - bucketSweep: 1, - hashLen: 5, - useDictionary: true, - } - case 3: - return &hashLongestMatchQuickly{ - bucketBits: 16, - bucketSweep: 2, - hashLen: 5, - useDictionary: false, - } - case 4: - return &hashLongestMatchQuickly{ - bucketBits: 17, - bucketSweep: 4, - hashLen: 5, - useDictionary: true, - } - case 5: - return new(h5) - case 6: - return new(h6) - case 10: - return new(h10) - case 35: - return &hashComposite{ - ha: newHasher(3), - hb: &hashRolling{jump: 4}, - } - case 40: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 1, - bankBits: 16, - numLastDistancesToCheck: 4, - } - case 41: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 1, - bankBits: 16, - numLastDistancesToCheck: 10, - } - case 42: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 512, - bankBits: 9, - numLastDistancesToCheck: 16, - } - case 54: - return &hashLongestMatchQuickly{ - bucketBits: 20, - bucketSweep: 4, - hashLen: 7, - useDictionary: false, - } - case 55: - return &hashComposite{ - ha: newHasher(54), - hb: &hashRolling{jump: 4}, - } - case 65: - return &hashComposite{ - ha: newHasher(6), - hb: &hashRolling{jump: 1}, - } - } - - panic(fmt.Sprintf("unknown hasher type: %d", typ)) -} - -func hasherSetup(handle *hasherHandle, params *encoderParams, data []byte, position uint, input_size uint, is_last bool) { - var self hasherHandle = nil - var common *hasherCommon = nil - var one_shot bool = (position == 0 && is_last) - if *handle == nil { - chooseHasher(params, ¶ms.hasher) - self = newHasher(params.hasher.type_) - - *handle = self - common = self.Common() - common.params = params.hasher - self.Initialize(params) - } - - self = *handle - common = self.Common() - if !common.is_prepared_ { - self.Prepare(one_shot, input_size, data) - - if position == 0 { - common.dict_num_lookups = 0 - common.dict_num_matches = 0 - } - - common.is_prepared_ = true - } -} - -func initOrStitchToPreviousBlock(handle *hasherHandle, data []byte, mask uint, params *encoderParams, position uint, input_size uint, is_last bool) { - var self hasherHandle - hasherSetup(handle, params, data, position, input_size, is_last) - self = *handle - self.StitchToPreviousBlock(input_size, position, data, mask) -} diff --git a/vendor/github.com/andybalholm/brotli/hash_composite.go b/vendor/github.com/andybalholm/brotli/hash_composite.go deleted file mode 100644 index a65fe2e6a9a..00000000000 --- a/vendor/github.com/andybalholm/brotli/hash_composite.go +++ /dev/null @@ -1,93 +0,0 @@ -package brotli - -/* Copyright 2018 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (h *hashComposite) HashTypeLength() uint { - var a uint = h.ha.HashTypeLength() - var b uint = h.hb.HashTypeLength() - if a > b { - return a - } else { - return b - } -} - -func (h *hashComposite) StoreLookahead() uint { - var a uint = h.ha.StoreLookahead() - var b uint = h.hb.StoreLookahead() - if a > b { - return a - } else { - return b - } -} - -/* Composite hasher: This hasher allows to combine two other hashers, HASHER_A - and HASHER_B. */ -type hashComposite struct { - hasherCommon - ha hasherHandle - hb hasherHandle - params *encoderParams -} - -func (h *hashComposite) Initialize(params *encoderParams) { - h.params = params -} - -/* TODO: Initialize of the hashers is defered to Prepare (and params - remembered here) because we don't get the one_shot and input_size params - here that are needed to know the memory size of them. Instead provide - those params to all hashers InitializehashComposite */ -func (h *hashComposite) Prepare(one_shot bool, input_size uint, data []byte) { - if h.ha == nil { - var common_a *hasherCommon - var common_b *hasherCommon - - common_a = h.ha.Common() - common_a.params = h.params.hasher - common_a.is_prepared_ = false - common_a.dict_num_lookups = 0 - common_a.dict_num_matches = 0 - h.ha.Initialize(h.params) - - common_b = h.hb.Common() - common_b.params = h.params.hasher - common_b.is_prepared_ = false - common_b.dict_num_lookups = 0 - common_b.dict_num_matches = 0 - h.hb.Initialize(h.params) - } - - h.ha.Prepare(one_shot, input_size, data) - h.hb.Prepare(one_shot, input_size, data) -} - -func (h *hashComposite) Store(data []byte, mask uint, ix uint) { - h.ha.Store(data, mask, ix) - h.hb.Store(data, mask, ix) -} - -func (h *hashComposite) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - h.ha.StoreRange(data, mask, ix_start, ix_end) - h.hb.StoreRange(data, mask, ix_start, ix_end) -} - -func (h *hashComposite) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - h.ha.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) - h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) -} - -func (h *hashComposite) PrepareDistanceCache(distance_cache []int) { - h.ha.PrepareDistanceCache(distance_cache) - h.hb.PrepareDistanceCache(distance_cache) -} - -func (h *hashComposite) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) - h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) -} diff --git a/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go b/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go deleted file mode 100644 index 3364c44bd52..00000000000 --- a/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go +++ /dev/null @@ -1,253 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (*hashForgetfulChain) HashTypeLength() uint { - return 4 -} - -func (*hashForgetfulChain) StoreLookahead() uint { - return 4 -} - -/* HashBytes is the function that chooses the bucket to place the address in.*/ -func (h *hashForgetfulChain) HashBytes(data []byte) uint { - var hash uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return uint(hash >> (32 - h.bucketBits)) -} - -type slot struct { - delta uint16 - next uint16 -} - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - Hashes are stored in chains which are bucketed to groups. Group of chains - share a storage "bank". When more than "bank size" chain nodes are added, - oldest nodes are replaced; this way several chains may share a tail. */ -type hashForgetfulChain struct { - hasherCommon - - bucketBits uint - numBanks uint - bankBits uint - numLastDistancesToCheck int - - addr []uint32 - head []uint16 - tiny_hash [65536]byte - banks [][]slot - free_slot_idx []uint16 - max_hops uint -} - -func (h *hashForgetfulChain) Initialize(params *encoderParams) { - var q uint - if params.quality > 6 { - q = 7 - } else { - q = 8 - } - h.max_hops = q << uint(params.quality-4) - - bankSize := 1 << h.bankBits - bucketSize := 1 << h.bucketBits - - h.addr = make([]uint32, bucketSize) - h.head = make([]uint16, bucketSize) - h.banks = make([][]slot, h.numBanks) - for i := range h.banks { - h.banks[i] = make([]slot, bankSize) - } - h.free_slot_idx = make([]uint16, h.numBanks) -} - -func (h *hashForgetfulChain) Prepare(one_shot bool, input_size uint, data []byte) { - var partial_prepare_threshold uint = (1 << h.bucketBits) >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var bucket uint = h.HashBytes(data[i:]) - - /* See InitEmpty comment. */ - h.addr[bucket] = 0xCCCCCCCC - - h.head[bucket] = 0xCCCC - } - } else { - /* Fill |addr| array with 0xCCCCCCCC value. Because of wrapping, position - processed by hasher never reaches 3GB + 64M; this makes all new chains - to be terminated after the first node. */ - for i := range h.addr { - h.addr[i] = 0xCCCCCCCC - } - - for i := range h.head { - h.head[i] = 0 - } - } - - h.tiny_hash = [65536]byte{} - for i := range h.free_slot_idx { - h.free_slot_idx[i] = 0 - } -} - -/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend - node to corresponding chain; also update tiny_hash for current position. 
*/ -func (h *hashForgetfulChain) Store(data []byte, mask uint, ix uint) { - var key uint = h.HashBytes(data[ix&mask:]) - var bank uint = key & (h.numBanks - 1) - var idx uint - idx = uint(h.free_slot_idx[bank]) & ((1 << h.bankBits) - 1) - h.free_slot_idx[bank]++ - var delta uint = ix - uint(h.addr[key]) - h.tiny_hash[uint16(ix)] = byte(key) - if delta > 0xFFFF { - delta = 0xFFFF - } - h.banks[bank][idx].delta = uint16(delta) - h.banks[bank][idx].next = h.head[key] - h.addr[key] = uint32(ix) - h.head[key] = uint16(idx) -} - -func (h *hashForgetfulChain) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint - for i = ix_start; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *hashForgetfulChain) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ring_buffer_mask, position-3) - h.Store(ringbuffer, ring_buffer_mask, position-2) - h.Store(ringbuffer, ring_buffer_mask, position-1) - } -} - -func (h *hashForgetfulChain) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.numLastDistancesToCheck) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCachehashForgetfulChain must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCachehashForgetfulChain once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. */ -func (h *hashForgetfulChain) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var key uint = h.HashBytes(data[cur_ix_masked:]) - var tiny_hash byte = byte(key) - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. */ - for i := 0; i < h.numLastDistancesToCheck; i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = (cur_ix - backward) - - /* For distance code 0 we want to consider 2-byte matches. 
*/ - if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash { - continue - } - if prev_ix >= cur_ix || backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 2 { - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(uint(i)) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var bank uint = key & (h.numBanks - 1) - var backward uint = 0 - var hops uint = h.max_hops - var delta uint = cur_ix - uint(h.addr[key]) - var slot uint = uint(h.head[key]) - for { - tmp6 := hops - hops-- - if tmp6 == 0 { - break - } - var prev_ix uint - var last uint = slot - backward += delta - if backward > max_backward { - break - } - prev_ix = (cur_ix - backward) & ring_buffer_mask - slot = uint(h.banks[bank][last].next) - delta = uint(h.banks[bank][last].delta) - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. */ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - h.Store(data, ring_buffer_mask, cur_ix) - } - - if out.score == min_score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go b/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go deleted file mode 100644 index 9375dc15539..00000000000 --- a/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go +++ /dev/null @@ -1,214 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression - a little faster (0.5% - 1%) and it compresses 0.15% better on small text - and HTML inputs. */ - -func (*hashLongestMatchQuickly) HashTypeLength() uint { - return 8 -} - -func (*hashLongestMatchQuickly) StoreLookahead() uint { - return 8 -} - -/* HashBytes is the function that chooses the bucket to place - the address in. The HashLongestMatch and hashLongestMatchQuickly - classes have separate, different implementations of hashing. */ -func (h *hashLongestMatchQuickly) HashBytes(data []byte) uint32 { - var hash uint64 = ((binary.LittleEndian.Uint64(data) << (64 - 8*h.hashLen)) * kHashMul64) - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return uint32(hash >> (64 - h.bucketBits)) -} - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (1 << 16). 
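The HashBytes comment in the hunk above explains how hashLongestMatchQuickly picks a bucket: isolate the first hashLen bytes with a left shift, multiply by a 64-bit constant, and keep the top bucketBits bits of the product. The following self-contained illustration is not the removed implementation; in particular the multiplier is a placeholder, since the real kHashMul64 constant lives elsewhere in the deleted package.

package main

import (
	"encoding/binary"
	"fmt"
)

const (
	hashLen    = 5
	bucketBits = 16
	// Placeholder odd multiplier, NOT brotli's kHashMul64; any well-mixing
	// constant demonstrates the mechanism.
	hashMul64 = 0x9E3779B97F4A7C15
)

// hashBytes keeps only the low hashLen bytes (the shift drops the rest),
// multiplies, and takes the top bucketBits bits as the bucket index.
func hashBytes(data []byte) uint32 {
	h := (binary.LittleEndian.Uint64(data) << (64 - 8*hashLen)) * hashMul64
	return uint32(h >> (64 - bucketBits))
}

func main() {
	a := []byte("abcdeXYZ")
	b := []byte("abcdeQRS")                   // same first hashLen bytes
	fmt.Println(hashBytes(a) == hashBytes(b)) // true: bytes beyond hashLen are ignored
	fmt.Println(hashBytes(a) < 1<<bucketBits) // true: result always fits the bucket range
}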
Starting from the - given index, 1 buckets are used to store values of a key. */ -type hashLongestMatchQuickly struct { - hasherCommon - - bucketBits uint - bucketSweep int - hashLen uint - useDictionary bool - - buckets []uint32 -} - -func (h *hashLongestMatchQuickly) Initialize(params *encoderParams) { - h.buckets = make([]uint32, 1<> 7 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = h.HashBytes(data[i:]) - for j := 0; j < h.bucketSweep; j++ { - h.buckets[key+uint32(j)] = 0 - } - } - } else { - /* It is not strictly necessary to fill this buffer here, but - not filling will make the results of the compression stochastic - (but correct). This is because random data would cause the - system to find accidentally good backward references here and there. */ - for i := range h.buckets { - h.buckets[i] = 0 - } - } -} - -/* Look at 5 bytes at &data[ix & mask]. - Compute a hash from these, and store the value somewhere within - [ix .. ix+3]. */ -func (h *hashLongestMatchQuickly) Store(data []byte, mask uint, ix uint) { - var key uint32 = h.HashBytes(data[ix&mask:]) - var off uint32 = uint32(ix>>3) % uint32(h.bucketSweep) - /* Wiggle the value with the bucket sweep range. */ - h.buckets[key+off] = uint32(ix) -} - -func (h *hashLongestMatchQuickly) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint - for i = ix_start; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *hashLongestMatchQuickly) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (*hashLongestMatchQuickly) PrepareDistanceCache(distance_cache []int) { -} - -/* Find a longest backward match of &data[cur_ix & ring_buffer_mask] - up to the length of max_length and stores the position cur_ix in the - hash table. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. 
*/ -func (h *hashLongestMatchQuickly) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var best_len_in uint = out.len - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var key uint32 = h.HashBytes(data[cur_ix_masked:]) - var compare_char int = int(data[cur_ix_masked+best_len_in]) - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = best_len_in - var cached_backward uint = uint(distance_cache[0]) - var prev_ix uint = cur_ix - cached_backward - var bucket []uint32 - out.len_code_delta = 0 - if prev_ix < cur_ix { - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char == int(data[prev_ix+best_len]) { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = uint(len) - out.distance = cached_backward - out.score = best_score - compare_char = int(data[cur_ix_masked+best_len]) - if h.bucketSweep == 1 { - h.buckets[key] = uint32(cur_ix) - return - } - } - } - } - } - - if h.bucketSweep == 1 { - var backward uint - var len uint - - /* Only one to look for, don't bother to prepare for a loop. */ - prev_ix = uint(h.buckets[key]) - - h.buckets[key] = uint32(cur_ix) - backward = cur_ix - prev_ix - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char != int(data[prev_ix+best_len_in]) { - return - } - - if backward == 0 || backward > max_backward { - return - } - - len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - out.len = uint(len) - out.distance = backward - out.score = score - return - } - } - } else { - bucket = h.buckets[key:] - var i int - prev_ix = uint(bucket[0]) - bucket = bucket[1:] - for i = 0; i < h.bucketSweep; (func() { i++; tmp3 := bucket; bucket = bucket[1:]; prev_ix = uint(tmp3[0]) })() { - var backward uint = cur_ix - prev_ix - var len uint - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char != int(data[prev_ix+best_len]) { - continue - } - - if backward == 0 || backward > max_backward { - continue - } - - len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = score - compare_char = int(data[cur_ix_masked+best_len]) - } - } - } - } - - if h.useDictionary && min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true) - } - - h.buckets[key+uint32((cur_ix>>3)%uint(h.bucketSweep))] = uint32(cur_ix) -} diff --git a/vendor/github.com/andybalholm/brotli/hash_rolling.go b/vendor/github.com/andybalholm/brotli/hash_rolling.go deleted file mode 100644 index ad655a0a5b8..00000000000 --- a/vendor/github.com/andybalholm/brotli/hash_rolling.go +++ /dev/null @@ -1,169 +0,0 @@ -package brotli - -/* Copyright 2018 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* NOTE: this hasher does not search in the dictionary. 
It is used as - backup-hasher, the main hasher already searches in it. */ - -const kRollingHashMul32 uint32 = 69069 - -const kInvalidPosHashRolling uint32 = 0xffffffff - -/* This hasher uses a longer forward length, but returning a higher value here - will hurt compression by the main hasher when combined with a composite - hasher. The hasher tests for forward itself instead. */ -func (*hashRolling) HashTypeLength() uint { - return 4 -} - -func (*hashRolling) StoreLookahead() uint { - return 4 -} - -/* Computes a code from a single byte. A lookup table of 256 values could be - used, but simply adding 1 works about as good. */ -func (*hashRolling) HashByte(b byte) uint32 { - return uint32(b) + 1 -} - -func (h *hashRolling) HashRollingFunctionInitial(state uint32, add byte, factor uint32) uint32 { - return uint32(factor*state + h.HashByte(add)) -} - -func (h *hashRolling) HashRollingFunction(state uint32, add byte, rem byte, factor uint32, factor_remove uint32) uint32 { - return uint32(factor*state + h.HashByte(add) - factor_remove*h.HashByte(rem)) -} - -/* Rolling hash for long distance long string matches. Stores one position - per bucket, bucket key is computed over a long region. */ -type hashRolling struct { - hasherCommon - - jump int - - state uint32 - table []uint32 - next_ix uint - chunk_len uint32 - factor uint32 - factor_remove uint32 -} - -func (h *hashRolling) Initialize(params *encoderParams) { - h.state = 0 - h.next_ix = 0 - - h.factor = kRollingHashMul32 - - /* Compute the factor of the oldest byte to remove: factor**steps modulo - 0xffffffff (the multiplications rely on 32-bit overflow) */ - h.factor_remove = 1 - - for i := 0; i < 32; i += h.jump { - h.factor_remove *= h.factor - } - - h.table = make([]uint32, 16777216) - for i := 0; i < 16777216; i++ { - h.table[i] = kInvalidPosHashRolling - } -} - -func (h *hashRolling) Prepare(one_shot bool, input_size uint, data []byte) { - /* Too small size, cannot use this hasher. */ - if input_size < 32 { - return - } - h.state = 0 - for i := 0; i < 32; i += h.jump { - h.state = h.HashRollingFunctionInitial(h.state, data[i], h.factor) - } -} - -func (*hashRolling) Store(data []byte, mask uint, ix uint) { -} - -func (*hashRolling) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { -} - -func (h *hashRolling) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - var position_masked uint - /* In this case we must re-initialize the hasher from scratch from the - current position. */ - - var available uint = num_bytes - if position&uint(h.jump-1) != 0 { - var diff uint = uint(h.jump) - (position & uint(h.jump-1)) - if diff > available { - available = 0 - } else { - available = available - diff - } - position += diff - } - - position_masked = position & ring_buffer_mask - - /* wrapping around ringbuffer not handled. 
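hashRolling's recurrence (state = factor*state + code(add) - factor_remove*code(rem), with factor_remove = factor^(32/jump) computed through uint32 wraparound) is easy to sanity-check in isolation. The snippet below is an aside rather than part of the diff: it confirms that rolling the 32-byte window forward by one jump reproduces the hash computed from scratch.

package main

import "fmt"

const (
	factor = 69069 // kRollingHashMul32 in the removed file
	jump   = 4
)

// code mirrors HashByte in the removed file: a byte contributes its value plus one.
func code(b byte) uint32 { return uint32(b) + 1 }

// fromScratch hashes the 32-byte window starting at pos, sampling every jump bytes.
func fromScratch(data []byte, pos int) uint32 {
	var state uint32
	for i := 0; i < 32; i += jump {
		state = factor*state + code(data[pos+i])
	}
	return state
}

func main() {
	// factorRemove = factor^(32/jump), relying on uint32 overflow, as in Initialize.
	factorRemove := uint32(1)
	for i := 0; i < 32; i += jump {
		factorRemove *= factor
	}

	data := make([]byte, 40)
	for i := range data {
		data[i] = byte(i*i + 7)
	}

	state := fromScratch(data, 0)
	// Roll forward by one step: drop data[0], pull in data[32].
	state = factor*state + code(data[32]) - factorRemove*code(data[0])

	fmt.Println(state == fromScratch(data, jump)) // true
}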
*/ - if available > ring_buffer_mask-position_masked { - available = ring_buffer_mask - position_masked - } - - h.Prepare(false, available, ringbuffer[position&ring_buffer_mask:]) - h.next_ix = position -} - -func (*hashRolling) PrepareDistanceCache(distance_cache []int) { -} - -func (h *hashRolling) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var pos uint = h.next_ix - - if cur_ix&uint(h.jump-1) != 0 { - return - } - - /* Not enough lookahead */ - if max_length < 32 { - return - } - - for pos = h.next_ix; pos <= cur_ix; pos += uint(h.jump) { - var code uint32 = h.state & ((16777216 * 64) - 1) - var rem byte = data[pos&ring_buffer_mask] - var add byte = data[(pos+32)&ring_buffer_mask] - var found_ix uint = uint(kInvalidPosHashRolling) - - h.state = h.HashRollingFunction(h.state, add, rem, h.factor, h.factor_remove) - - if code < 16777216 { - found_ix = uint(h.table[code]) - h.table[code] = uint32(pos) - if pos == cur_ix && uint32(found_ix) != kInvalidPosHashRolling { - /* The cast to 32-bit makes backward distances up to 4GB work even - if cur_ix is above 4GB, despite using 32-bit values in the table. */ - var backward uint = uint(uint32(cur_ix - found_ix)) - if backward <= max_backward { - var found_ix_masked uint = found_ix & ring_buffer_mask - var len uint = findMatchLengthWithLimit(data[found_ix_masked:], data[cur_ix_masked:], max_length) - if len >= 4 && len > out.len { - var score uint = backwardReferenceScore(uint(len), backward) - if score > out.score { - out.len = uint(len) - out.distance = backward - out.score = score - out.len_code_delta = 0 - } - } - } - } - } - } - - h.next_ix = cur_ix + uint(h.jump) -} diff --git a/vendor/github.com/andybalholm/brotli/histogram.go b/vendor/github.com/andybalholm/brotli/histogram.go deleted file mode 100644 index f208ff74b97..00000000000 --- a/vendor/github.com/andybalholm/brotli/histogram.go +++ /dev/null @@ -1,227 +0,0 @@ -package brotli - -import "math" - -/* The distance symbols effectively used by "Large Window Brotli" (32-bit). 
*/ -const numHistogramDistanceSymbols = 544 - -type histogramLiteral struct { - data_ [numLiteralSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearLiteral(self *histogramLiteral) { - self.data_ = [numLiteralSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsLiteral(array []histogramLiteral, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearLiteral(&array[i:][0]) - } -} - -func histogramAddLiteral(self *histogramLiteral, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorLiteral(self *histogramLiteral, p []byte, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramLiteral(self *histogramLiteral, v *histogramLiteral) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numLiteralSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func histogramDataSizeLiteral() uint { - return numLiteralSymbols -} - -type histogramCommand struct { - data_ [numCommandSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearCommand(self *histogramCommand) { - self.data_ = [numCommandSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsCommand(array []histogramCommand, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearCommand(&array[i:][0]) - } -} - -func histogramAddCommand(self *histogramCommand, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorCommand(self *histogramCommand, p []uint16, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramCommand(self *histogramCommand, v *histogramCommand) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numCommandSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func histogramDataSizeCommand() uint { - return numCommandSymbols -} - -type histogramDistance struct { - data_ [numDistanceSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearDistance(self *histogramDistance) { - self.data_ = [numDistanceSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsDistance(array []histogramDistance, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearDistance(&array[i:][0]) - } -} - -func histogramAddDistance(self *histogramDistance, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorDistance(self *histogramDistance, p []uint16, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramDistance(self *histogramDistance, v *histogramDistance) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numDistanceSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func histogramDataSizeDistance() uint { - return numDistanceSymbols -} - -type blockSplitIterator struct { - split_ *blockSplit - idx_ uint - type_ uint - length_ uint -} - -func initBlockSplitIterator(self *blockSplitIterator, split *blockSplit) { - self.split_ = split - self.idx_ = 0 - self.type_ = 0 - if split.lengths != nil { - self.length_ = uint(split.lengths[0]) - } else { - self.length_ = 0 - } -} - -func blockSplitIteratorNext(self *blockSplitIterator) { - if self.length_ == 0 { - 
self.idx_++ - self.type_ = uint(self.split_.types[self.idx_]) - self.length_ = uint(self.split_.lengths[self.idx_]) - } - - self.length_-- -} - -func buildHistogramsWithContext(cmds []command, num_commands uint, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit, ringbuffer []byte, start_pos uint, mask uint, prev_byte byte, prev_byte2 byte, context_modes []int, literal_histograms []histogramLiteral, insert_and_copy_histograms []histogramCommand, copy_dist_histograms []histogramDistance) { - var pos uint = start_pos - var literal_it blockSplitIterator - var insert_and_copy_it blockSplitIterator - var dist_it blockSplitIterator - var i uint - - initBlockSplitIterator(&literal_it, literal_split) - initBlockSplitIterator(&insert_and_copy_it, insert_and_copy_split) - initBlockSplitIterator(&dist_it, dist_split) - for i = 0; i < num_commands; i++ { - var cmd *command = &cmds[i] - var j uint - blockSplitIteratorNext(&insert_and_copy_it) - histogramAddCommand(&insert_and_copy_histograms[insert_and_copy_it.type_], uint(cmd.cmd_prefix_)) - - /* TODO: unwrap iterator blocks. */ - for j = uint(cmd.insert_len_); j != 0; j-- { - var context uint - blockSplitIteratorNext(&literal_it) - context = literal_it.type_ - if context_modes != nil { - var lut contextLUT = getContextLUT(context_modes[context]) - context = (context << literalContextBits) + uint(getContext(prev_byte, prev_byte2, lut)) - } - - histogramAddLiteral(&literal_histograms[context], uint(ringbuffer[pos&mask])) - prev_byte2 = prev_byte - prev_byte = ringbuffer[pos&mask] - pos++ - } - - pos += uint(commandCopyLen(cmd)) - if commandCopyLen(cmd) != 0 { - prev_byte2 = ringbuffer[(pos-2)&mask] - prev_byte = ringbuffer[(pos-1)&mask] - if cmd.cmd_prefix_ >= 128 { - var context uint - blockSplitIteratorNext(&dist_it) - context = uint(uint32(dist_it.type_< bestQ && - (spec.Value == "*" || spec.Value == offer) { - bestQ = spec.Q - bestOffer = offer - } - } - } - if bestQ == 0 { - bestOffer = "" - } - return bestOffer -} - -// acceptSpec describes an Accept* header. -type acceptSpec struct { - Value string - Q float64 -} - -// parseAccept parses Accept* headers. 
-func parseAccept(header http.Header, key string) (specs []acceptSpec) { -loop: - for _, s := range header[key] { - for { - var spec acceptSpec - spec.Value, s = expectTokenSlash(s) - if spec.Value == "" { - continue loop - } - spec.Q = 1.0 - s = skipSpace(s) - if strings.HasPrefix(s, ";") { - s = skipSpace(s[1:]) - if !strings.HasPrefix(s, "q=") { - continue loop - } - spec.Q, s = expectQuality(s[2:]) - if spec.Q < 0.0 { - continue loop - } - } - specs = append(specs, spec) - s = skipSpace(s) - if !strings.HasPrefix(s, ",") { - continue loop - } - s = skipSpace(s[1:]) - } - } - return -} - -func skipSpace(s string) (rest string) { - i := 0 - for ; i < len(s); i++ { - if octetTypes[s[i]]&isSpace == 0 { - break - } - } - return s[i:] -} - -func expectTokenSlash(s string) (token, rest string) { - i := 0 - for ; i < len(s); i++ { - b := s[i] - if (octetTypes[b]&isToken == 0) && b != '/' { - break - } - } - return s[:i], s[i:] -} - -func expectQuality(s string) (q float64, rest string) { - switch { - case len(s) == 0: - return -1, "" - case s[0] == '0': - q = 0 - case s[0] == '1': - q = 1 - default: - return -1, "" - } - s = s[1:] - if !strings.HasPrefix(s, ".") { - return q, s - } - s = s[1:] - i := 0 - n := 0 - d := 1 - for ; i < len(s); i++ { - b := s[i] - if b < '0' || b > '9' { - break - } - n = n*10 + int(b) - '0' - d *= 10 - } - return q + float64(n)/float64(d), s[i:] -} - -// Octet types from RFC 2616. -var octetTypes [256]octetType - -type octetType byte - -const ( - isToken octetType = 1 << iota - isSpace -) - -func init() { - // OCTET = - // CHAR = - // CTL = - // CR = - // LF = - // SP = - // HT = - // <"> = - // CRLF = CR LF - // LWS = [CRLF] 1*( SP | HT ) - // TEXT = - // separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> - // | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT - // token = 1* - // qdtext = > - - for c := 0; c < 256; c++ { - var t octetType - isCtl := c <= 31 || c == 127 - isChar := 0 <= c && c <= 127 - isSeparator := strings.IndexRune(" \t\"(),/:;<=>?@[]\\{}", rune(c)) >= 0 - if strings.IndexRune(" \t\r\n", rune(c)) >= 0 { - t |= isSpace - } - if isChar && !isCtl && !isSeparator { - t |= isToken - } - octetTypes[c] = t - } -} diff --git a/vendor/github.com/andybalholm/brotli/huffman.go b/vendor/github.com/andybalholm/brotli/huffman.go deleted file mode 100644 index 182f3d2a552..00000000000 --- a/vendor/github.com/andybalholm/brotli/huffman.go +++ /dev/null @@ -1,653 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Utilities for building Huffman decoding tables. */ - -const huffmanMaxCodeLength = 15 - -/* Maximum possible Huffman table size for an alphabet size of (index * 32), - max code length 15 and root table bits 8. */ -var kMaxHuffmanTableSize = []uint16{ - 256, - 402, - 436, - 468, - 500, - 534, - 566, - 598, - 630, - 662, - 694, - 726, - 758, - 790, - 822, - 854, - 886, - 920, - 952, - 984, - 1016, - 1048, - 1080, - 1112, - 1144, - 1176, - 1208, - 1240, - 1272, - 1304, - 1336, - 1368, - 1400, - 1432, - 1464, - 1496, - 1528, -} - -/* BROTLI_NUM_BLOCK_LEN_SYMBOLS == 26 */ -const huffmanMaxSize26 = 396 - -/* BROTLI_MAX_BLOCK_TYPE_SYMBOLS == 258 */ -const huffmanMaxSize258 = 632 - -/* BROTLI_MAX_CONTEXT_MAP_SYMBOLS == 272 */ -const huffmanMaxSize272 = 646 - -const huffmanMaxCodeLengthCodeLength = 5 - -/* Do not create this struct directly - use the ConstructHuffmanCode - * constructor below! 
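The helpers deleted in this hunk (the bestQ loop, acceptSpec, and parseAccept) implement standard Accept-header negotiation: parse each listed value with its q parameter and return the offered value with the highest quality. The sketch below only illustrates that behaviour; its parsing is deliberately naive and is not the removed RFC-style tokenizer.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// pickEncoding returns the offer with the highest q value in acceptEncoding,
// or "" when nothing acceptable is offered. Simplified for illustration.
func pickEncoding(acceptEncoding string, offers []string) string {
	best, bestQ := "", 0.0
	for _, part := range strings.Split(acceptEncoding, ",") {
		fields := strings.Split(strings.TrimSpace(part), ";")
		name := strings.TrimSpace(fields[0])
		q := 1.0 // absent q parameter means quality 1
		for _, p := range fields[1:] {
			p = strings.TrimSpace(p)
			if strings.HasPrefix(p, "q=") {
				if v, err := strconv.ParseFloat(p[2:], 64); err == nil {
					q = v
				}
			}
		}
		for _, offer := range offers {
			if (name == offer || name == "*") && q > bestQ {
				best, bestQ = offer, q
			}
		}
	}
	return best
}

func main() {
	fmt.Println(pickEncoding("gzip;q=0.8, br", []string{"br", "gzip"}))      // br
	fmt.Println(pickEncoding("gzip, identity;q=0", []string{"br", "gzip"})) // gzip
}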
*/ -type huffmanCode struct { - bits byte - value uint16 -} - -func constructHuffmanCode(bits byte, value uint16) huffmanCode { - var h huffmanCode - h.bits = bits - h.value = value - return h -} - -/* Builds Huffman lookup table assuming code lengths are in symbol order. */ - -/* Builds Huffman lookup table assuming code lengths are in symbol order. - Returns size of resulting table. */ - -/* Builds a simple Huffman table. The |num_symbols| parameter is to be - interpreted as follows: 0 means 1 symbol, 1 means 2 symbols, - 2 means 3 symbols, 3 means 4 symbols with lengths [2, 2, 2, 2], - 4 means 4 symbols with lengths [1, 2, 3, 3]. */ - -/* Contains a collection of Huffman trees with the same alphabet size. */ -/* max_symbol is needed due to simple codes since log2(alphabet_size) could be - greater than log2(max_symbol). */ -type huffmanTreeGroup struct { - htrees [][]huffmanCode - codes []huffmanCode - alphabet_size uint16 - max_symbol uint16 - num_htrees uint16 -} - -const reverseBitsMax = 8 - -const reverseBitsBase = 0 - -var kReverseBits = [1 << reverseBitsMax]byte{ - 0x00, - 0x80, - 0x40, - 0xC0, - 0x20, - 0xA0, - 0x60, - 0xE0, - 0x10, - 0x90, - 0x50, - 0xD0, - 0x30, - 0xB0, - 0x70, - 0xF0, - 0x08, - 0x88, - 0x48, - 0xC8, - 0x28, - 0xA8, - 0x68, - 0xE8, - 0x18, - 0x98, - 0x58, - 0xD8, - 0x38, - 0xB8, - 0x78, - 0xF8, - 0x04, - 0x84, - 0x44, - 0xC4, - 0x24, - 0xA4, - 0x64, - 0xE4, - 0x14, - 0x94, - 0x54, - 0xD4, - 0x34, - 0xB4, - 0x74, - 0xF4, - 0x0C, - 0x8C, - 0x4C, - 0xCC, - 0x2C, - 0xAC, - 0x6C, - 0xEC, - 0x1C, - 0x9C, - 0x5C, - 0xDC, - 0x3C, - 0xBC, - 0x7C, - 0xFC, - 0x02, - 0x82, - 0x42, - 0xC2, - 0x22, - 0xA2, - 0x62, - 0xE2, - 0x12, - 0x92, - 0x52, - 0xD2, - 0x32, - 0xB2, - 0x72, - 0xF2, - 0x0A, - 0x8A, - 0x4A, - 0xCA, - 0x2A, - 0xAA, - 0x6A, - 0xEA, - 0x1A, - 0x9A, - 0x5A, - 0xDA, - 0x3A, - 0xBA, - 0x7A, - 0xFA, - 0x06, - 0x86, - 0x46, - 0xC6, - 0x26, - 0xA6, - 0x66, - 0xE6, - 0x16, - 0x96, - 0x56, - 0xD6, - 0x36, - 0xB6, - 0x76, - 0xF6, - 0x0E, - 0x8E, - 0x4E, - 0xCE, - 0x2E, - 0xAE, - 0x6E, - 0xEE, - 0x1E, - 0x9E, - 0x5E, - 0xDE, - 0x3E, - 0xBE, - 0x7E, - 0xFE, - 0x01, - 0x81, - 0x41, - 0xC1, - 0x21, - 0xA1, - 0x61, - 0xE1, - 0x11, - 0x91, - 0x51, - 0xD1, - 0x31, - 0xB1, - 0x71, - 0xF1, - 0x09, - 0x89, - 0x49, - 0xC9, - 0x29, - 0xA9, - 0x69, - 0xE9, - 0x19, - 0x99, - 0x59, - 0xD9, - 0x39, - 0xB9, - 0x79, - 0xF9, - 0x05, - 0x85, - 0x45, - 0xC5, - 0x25, - 0xA5, - 0x65, - 0xE5, - 0x15, - 0x95, - 0x55, - 0xD5, - 0x35, - 0xB5, - 0x75, - 0xF5, - 0x0D, - 0x8D, - 0x4D, - 0xCD, - 0x2D, - 0xAD, - 0x6D, - 0xED, - 0x1D, - 0x9D, - 0x5D, - 0xDD, - 0x3D, - 0xBD, - 0x7D, - 0xFD, - 0x03, - 0x83, - 0x43, - 0xC3, - 0x23, - 0xA3, - 0x63, - 0xE3, - 0x13, - 0x93, - 0x53, - 0xD3, - 0x33, - 0xB3, - 0x73, - 0xF3, - 0x0B, - 0x8B, - 0x4B, - 0xCB, - 0x2B, - 0xAB, - 0x6B, - 0xEB, - 0x1B, - 0x9B, - 0x5B, - 0xDB, - 0x3B, - 0xBB, - 0x7B, - 0xFB, - 0x07, - 0x87, - 0x47, - 0xC7, - 0x27, - 0xA7, - 0x67, - 0xE7, - 0x17, - 0x97, - 0x57, - 0xD7, - 0x37, - 0xB7, - 0x77, - 0xF7, - 0x0F, - 0x8F, - 0x4F, - 0xCF, - 0x2F, - 0xAF, - 0x6F, - 0xEF, - 0x1F, - 0x9F, - 0x5F, - 0xDF, - 0x3F, - 0xBF, - 0x7F, - 0xFF, -} - -const reverseBitsLowest = (uint64(1) << (reverseBitsMax - 1 + reverseBitsBase)) - -/* Returns reverse(num >> BROTLI_REVERSE_BITS_BASE, BROTLI_REVERSE_BITS_MAX), - where reverse(value, len) is the bit-wise reversal of the len least - significant bits of value. 
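The 256-entry kReverseBits table removed above simply stores the bit-reversed value of each byte; it is consulted when Huffman table slots are filled in bit-reversed key order. A quick way to see what the table encodes (an aside, not a replacement for the lookup table):

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// Each table entry equals the bit reversal of its index.
	for _, v := range []byte{0x01, 0x02, 0x03, 0xA5} {
		fmt.Printf("reverse(%#02x) = %#02x\n", v, bits.Reverse8(v))
	}
	// reverse(0x01) = 0x80, reverse(0x02) = 0x40, reverse(0x03) = 0xc0,
	// matching the corresponding kReverseBits entries.
}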
*/ -func reverseBits8(num uint64) uint64 { - return uint64(kReverseBits[num]) -} - -/* Stores code in table[0], table[step], table[2*step], ..., table[end] */ -/* Assumes that end is an integer multiple of step */ -func replicateValue(table []huffmanCode, step int, end int, code huffmanCode) { - for { - end -= step - table[end] = code - if end <= 0 { - break - } - } -} - -/* Returns the table width of the next 2nd level table. |count| is the histogram - of bit lengths for the remaining symbols, |len| is the code length of the - next processed symbol. */ -func nextTableBitSize(count []uint16, len int, root_bits int) int { - var left int = 1 << uint(len-root_bits) - for len < huffmanMaxCodeLength { - left -= int(count[len]) - if left <= 0 { - break - } - len++ - left <<= 1 - } - - return len - root_bits -} - -func buildCodeLengthsHuffmanTable(table []huffmanCode, code_lengths []byte, count []uint16) { - var code huffmanCode /* current table entry */ /* symbol index in original or sorted table */ /* prefix code */ /* prefix code addend */ /* step size to replicate values in current table */ /* size of current table */ /* symbols sorted by code length */ - var symbol int - var key uint64 - var key_step uint64 - var step int - var table_size int - var sorted [codeLengthCodes]int - var offset [huffmanMaxCodeLengthCodeLength + 1]int - var bits int - var bits_count int - /* offsets in sorted table for each length */ - assert(huffmanMaxCodeLengthCodeLength <= reverseBitsMax) - - /* Generate offsets into sorted symbol table by code length. */ - symbol = -1 - - bits = 1 - var i int - for i = 0; i < huffmanMaxCodeLengthCodeLength; i++ { - symbol += int(count[bits]) - offset[bits] = symbol - bits++ - } - - /* Symbols with code length 0 are placed after all other symbols. */ - offset[0] = codeLengthCodes - 1 - - /* Sort symbols by length, by symbol order within each length. */ - symbol = codeLengthCodes - - for { - var i int - for i = 0; i < 6; i++ { - symbol-- - sorted[offset[code_lengths[symbol]]] = symbol - offset[code_lengths[symbol]]-- - } - if symbol == 0 { - break - } - } - - table_size = 1 << huffmanMaxCodeLengthCodeLength - - /* Special case: all symbols but one have 0 code length. */ - if offset[0] == 0 { - code = constructHuffmanCode(0, uint16(sorted[0])) - for key = 0; key < uint64(table_size); key++ { - table[key] = code - } - - return - } - - /* Fill in table. 
*/ - key = 0 - - key_step = reverseBitsLowest - symbol = 0 - bits = 1 - step = 2 - for { - for bits_count = int(count[bits]); bits_count != 0; bits_count-- { - code = constructHuffmanCode(byte(bits), uint16(sorted[symbol])) - symbol++ - replicateValue(table[reverseBits8(key):], step, table_size, code) - key += key_step - } - - step <<= 1 - key_step >>= 1 - bits++ - if bits > huffmanMaxCodeLengthCodeLength { - break - } - } -} - -func buildHuffmanTable(root_table []huffmanCode, root_bits int, symbol_lists symbolList, count []uint16) uint32 { - var code huffmanCode /* current table entry */ /* next available space in table */ /* current code length */ /* symbol index in original or sorted table */ /* prefix code */ /* prefix code addend */ /* 2nd level table prefix code */ /* 2nd level table prefix code addend */ /* step size to replicate values in current table */ /* key length of current table */ /* size of current table */ /* sum of root table size and 2nd level table sizes */ - var table []huffmanCode - var len int - var symbol int - var key uint64 - var key_step uint64 - var sub_key uint64 - var sub_key_step uint64 - var step int - var table_bits int - var table_size int - var total_size int - var max_length int = -1 - var bits int - var bits_count int - - assert(root_bits <= reverseBitsMax) - assert(huffmanMaxCodeLength-root_bits <= reverseBitsMax) - - for symbolListGet(symbol_lists, max_length) == 0xFFFF { - max_length-- - } - max_length += huffmanMaxCodeLength + 1 - - table = root_table - table_bits = root_bits - table_size = 1 << uint(table_bits) - total_size = table_size - - /* Fill in the root table. Reduce the table size to if possible, - and create the repetitions by memcpy. */ - if table_bits > max_length { - table_bits = max_length - table_size = 1 << uint(table_bits) - } - - key = 0 - key_step = reverseBitsLowest - bits = 1 - step = 2 - for { - symbol = bits - (huffmanMaxCodeLength + 1) - for bits_count = int(count[bits]); bits_count != 0; bits_count-- { - symbol = int(symbolListGet(symbol_lists, symbol)) - code = constructHuffmanCode(byte(bits), uint16(symbol)) - replicateValue(table[reverseBits8(key):], step, table_size, code) - key += key_step - } - - step <<= 1 - key_step >>= 1 - bits++ - if bits > table_bits { - break - } - } - - /* If root_bits != table_bits then replicate to fill the remaining slots. */ - for total_size != table_size { - copy(table[table_size:], table[:uint(table_size)]) - table_size <<= 1 - } - - /* Fill in 2nd level tables and add pointers to root table. 
*/ - key_step = reverseBitsLowest >> uint(root_bits-1) - - sub_key = reverseBitsLowest << 1 - sub_key_step = reverseBitsLowest - len = root_bits + 1 - step = 2 - for ; len <= max_length; len++ { - symbol = len - (huffmanMaxCodeLength + 1) - for ; count[len] != 0; count[len]-- { - if sub_key == reverseBitsLowest<<1 { - table = table[table_size:] - table_bits = nextTableBitSize(count, int(len), root_bits) - table_size = 1 << uint(table_bits) - total_size += table_size - sub_key = reverseBits8(key) - key += key_step - root_table[sub_key] = constructHuffmanCode(byte(table_bits+root_bits), uint16(uint64(uint(-cap(table)+cap(root_table)))-sub_key)) - sub_key = 0 - } - - symbol = int(symbolListGet(symbol_lists, symbol)) - code = constructHuffmanCode(byte(len-root_bits), uint16(symbol)) - replicateValue(table[reverseBits8(sub_key):], step, table_size, code) - sub_key += sub_key_step - } - - step <<= 1 - sub_key_step >>= 1 - } - - return uint32(total_size) -} - -func buildSimpleHuffmanTable(table []huffmanCode, root_bits int, val []uint16, num_symbols uint32) uint32 { - var table_size uint32 = 1 - var goal_size uint32 = 1 << uint(root_bits) - switch num_symbols { - case 0: - table[0] = constructHuffmanCode(0, val[0]) - - case 1: - if val[1] > val[0] { - table[0] = constructHuffmanCode(1, val[0]) - table[1] = constructHuffmanCode(1, val[1]) - } else { - table[0] = constructHuffmanCode(1, val[1]) - table[1] = constructHuffmanCode(1, val[0]) - } - - table_size = 2 - - case 2: - table[0] = constructHuffmanCode(1, val[0]) - table[2] = constructHuffmanCode(1, val[0]) - if val[2] > val[1] { - table[1] = constructHuffmanCode(2, val[1]) - table[3] = constructHuffmanCode(2, val[2]) - } else { - table[1] = constructHuffmanCode(2, val[2]) - table[3] = constructHuffmanCode(2, val[1]) - } - - table_size = 4 - - case 3: - var i int - var k int - for i = 0; i < 3; i++ { - for k = i + 1; k < 4; k++ { - if val[k] < val[i] { - var t uint16 = val[k] - val[k] = val[i] - val[i] = t - } - } - } - - table[0] = constructHuffmanCode(2, val[0]) - table[2] = constructHuffmanCode(2, val[1]) - table[1] = constructHuffmanCode(2, val[2]) - table[3] = constructHuffmanCode(2, val[3]) - table_size = 4 - - case 4: - if val[3] < val[2] { - var t uint16 = val[3] - val[3] = val[2] - val[2] = t - } - - table[0] = constructHuffmanCode(1, val[0]) - table[1] = constructHuffmanCode(2, val[1]) - table[2] = constructHuffmanCode(1, val[0]) - table[3] = constructHuffmanCode(3, val[2]) - table[4] = constructHuffmanCode(1, val[0]) - table[5] = constructHuffmanCode(2, val[1]) - table[6] = constructHuffmanCode(1, val[0]) - table[7] = constructHuffmanCode(3, val[3]) - table_size = 8 - } - - for table_size != goal_size { - copy(table[table_size:], table[:uint(table_size)]) - table_size <<= 1 - } - - return goal_size -} diff --git a/vendor/github.com/andybalholm/brotli/literal_cost.go b/vendor/github.com/andybalholm/brotli/literal_cost.go deleted file mode 100644 index 5a9ace94ee0..00000000000 --- a/vendor/github.com/andybalholm/brotli/literal_cost.go +++ /dev/null @@ -1,182 +0,0 @@ -package brotli - -func utf8Position(last uint, c uint, clamp uint) uint { - if c < 128 { - return 0 /* Next one is the 'Byte 1' again. */ - } else if c >= 192 { /* Next one is the 'Byte 2' of utf-8 encoding. */ - return brotli_min_size_t(1, clamp) - } else { - /* Let's decide over the last byte if this ends the sequence. */ - if last < 0xE0 { - return 0 /* Completed two or three byte coding. */ /* Next one is the 'Byte 3' of utf-8 encoding. 
*/ - } else { - return brotli_min_size_t(2, clamp) - } - } -} - -func decideMultiByteStatsLevel(pos uint, len uint, mask uint, data []byte) uint { - var counts = [3]uint{0} /* should be 2, but 1 compresses better. */ - var max_utf8 uint = 1 - var last_c uint = 0 - var i uint - for i = 0; i < len; i++ { - var c uint = uint(data[(pos+i)&mask]) - counts[utf8Position(last_c, c, 2)]++ - last_c = c - } - - if counts[2] < 500 { - max_utf8 = 1 - } - - if counts[1]+counts[2] < 25 { - max_utf8 = 0 - } - - return max_utf8 -} - -func estimateBitCostsForLiteralsUTF8(pos uint, len uint, mask uint, data []byte, cost []float32) { - var max_utf8 uint = decideMultiByteStatsLevel(pos, uint(len), mask, data) - /* Bootstrap histograms. */ - var histogram = [3][256]uint{[256]uint{0}} - var window_half uint = 495 - var in_window uint = brotli_min_size_t(window_half, uint(len)) - var in_window_utf8 = [3]uint{0} - /* max_utf8 is 0 (normal ASCII single byte modeling), - 1 (for 2-byte UTF-8 modeling), or 2 (for 3-byte UTF-8 modeling). */ - - var i uint - { - var last_c uint = 0 - var utf8_pos uint = 0 - for i = 0; i < in_window; i++ { - var c uint = uint(data[(pos+i)&mask]) - histogram[utf8_pos][c]++ - in_window_utf8[utf8_pos]++ - utf8_pos = utf8Position(last_c, c, max_utf8) - last_c = c - } - } - - /* Compute bit costs with sliding window. */ - for i = 0; i < len; i++ { - if i >= window_half { - var c uint - var last_c uint - if i < window_half+1 { - c = 0 - } else { - c = uint(data[(pos+i-window_half-1)&mask]) - } - if i < window_half+2 { - last_c = 0 - } else { - last_c = uint(data[(pos+i-window_half-2)&mask]) - } - /* Remove a byte in the past. */ - - var utf8_pos2 uint = utf8Position(last_c, c, max_utf8) - histogram[utf8_pos2][data[(pos+i-window_half)&mask]]-- - in_window_utf8[utf8_pos2]-- - } - - if i+window_half < len { - var c uint = uint(data[(pos+i+window_half-1)&mask]) - var last_c uint = uint(data[(pos+i+window_half-2)&mask]) - /* Add a byte in the future. */ - - var utf8_pos2 uint = utf8Position(last_c, c, max_utf8) - histogram[utf8_pos2][data[(pos+i+window_half)&mask]]++ - in_window_utf8[utf8_pos2]++ - } - { - var c uint - var last_c uint - if i < 1 { - c = 0 - } else { - c = uint(data[(pos+i-1)&mask]) - } - if i < 2 { - last_c = 0 - } else { - last_c = uint(data[(pos+i-2)&mask]) - } - var utf8_pos uint = utf8Position(last_c, c, max_utf8) - var masked_pos uint = (pos + i) & mask - var histo uint = histogram[utf8_pos][data[masked_pos]] - var lit_cost float64 - if histo == 0 { - histo = 1 - } - - lit_cost = fastLog2(in_window_utf8[utf8_pos]) - fastLog2(histo) - lit_cost += 0.02905 - if lit_cost < 1.0 { - lit_cost *= 0.5 - lit_cost += 0.5 - } - - /* Make the first bytes more expensive -- seems to help, not sure why. - Perhaps because the entropy source is changing its properties - rapidly in the beginning of the file, perhaps because the beginning - of the data is a statistical "anomaly". */ - if i < 2000 { - lit_cost += 0.7 - (float64(2000-i) / 2000.0 * 0.35) - } - - cost[i] = float32(lit_cost) - } - } -} - -func estimateBitCostsForLiterals(pos uint, len uint, mask uint, data []byte, cost []float32) { - if isMostlyUTF8(data, pos, mask, uint(len), kMinUTF8Ratio) { - estimateBitCostsForLiteralsUTF8(pos, uint(len), mask, data, cost) - return - } else { - var histogram = [256]uint{0} - var window_half uint = 2000 - var in_window uint = brotli_min_size_t(window_half, uint(len)) - var i uint - /* Bootstrap histogram. 
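The estimateBitCostsForLiterals code being removed boils down to a sliding-window entropy estimate: a byte seen histo times among inWindow recent literals costs about log2(inWindow) - log2(histo) bits plus a small bias, and costs below one bit have their distance to 1.0 halved. A minimal sketch of just that arithmetic follows; it is not the removed function, which also maintains the window incrementally.

package main

import (
	"fmt"
	"math"
)

// literalBitCost estimates the coding cost of one literal byte given its
// count in the current window, following the formula in the removed file
// (0.029 is the bias used in its non-UTF-8 path).
func literalBitCost(histo, inWindow uint) float64 {
	if histo == 0 {
		histo = 1
	}
	cost := math.Log2(float64(inWindow)) - math.Log2(float64(histo)) + 0.029
	if cost < 1.0 {
		cost = cost*0.5 + 0.5 // halve the distance to 1 bit for very cheap literals
	}
	return cost
}

func main() {
	fmt.Printf("%.3f\n", literalBitCost(1000, 2000)) // ~1.029 bits: very common byte
	fmt.Printf("%.3f\n", literalBitCost(2, 2000))    // ~9.995 bits: rare byte
}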
*/ - for i = 0; i < in_window; i++ { - histogram[data[(pos+i)&mask]]++ - } - - /* Compute bit costs with sliding window. */ - for i = 0; i < len; i++ { - var histo uint - if i >= window_half { - /* Remove a byte in the past. */ - histogram[data[(pos+i-window_half)&mask]]-- - - in_window-- - } - - if i+window_half < len { - /* Add a byte in the future. */ - histogram[data[(pos+i+window_half)&mask]]++ - - in_window++ - } - - histo = histogram[data[(pos+i)&mask]] - if histo == 0 { - histo = 1 - } - { - var lit_cost float64 = fastLog2(in_window) - fastLog2(histo) - lit_cost += 0.029 - if lit_cost < 1.0 { - lit_cost *= 0.5 - lit_cost += 0.5 - } - - cost[i] = float32(lit_cost) - } - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/memory.go b/vendor/github.com/andybalholm/brotli/memory.go deleted file mode 100644 index 7208a3bbf6f..00000000000 --- a/vendor/github.com/andybalholm/brotli/memory.go +++ /dev/null @@ -1,56 +0,0 @@ -package brotli - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* -Dynamically grows array capacity to at least the requested size -T: data type -A: array -C: capacity -R: requested size -*/ -func brotli_ensure_capacity_uint8_t(a *[]byte, c *uint, r uint) { - if *c < r { - var new_size uint = *c - if new_size == 0 { - new_size = r - } - - for new_size < r { - new_size *= 2 - } - var new_array []byte = make([]byte, new_size) - if *c != 0 { - copy(new_array, (*a)[:*c]) - } - - *a = new_array - *c = new_size - } -} - -func brotli_ensure_capacity_uint32_t(a *[]uint32, c *uint, r uint) { - var new_array []uint32 - if *c < r { - var new_size uint = *c - if new_size == 0 { - new_size = r - } - - for new_size < r { - new_size *= 2 - } - - new_array = make([]uint32, new_size) - if *c != 0 { - copy(new_array, (*a)[:*c]) - } - - *a = new_array - *c = new_size - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock.go b/vendor/github.com/andybalholm/brotli/metablock.go deleted file mode 100644 index 4a412cf4e24..00000000000 --- a/vendor/github.com/andybalholm/brotli/metablock.go +++ /dev/null @@ -1,555 +0,0 @@ -package brotli - -/* Copyright 2014 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Algorithms for distributing the literals and commands of a metablock between - block types and contexts. 
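memory.go's brotli_ensure_capacity_* helpers, removed in full above, grow a backing slice by doubling its capacity until it covers the request and copying the old contents across. The small usage sketch below re-implements the byte variant purely for illustration of that growth pattern.

package main

import "fmt"

// ensureCapacity grows *a to at least r elements, doubling the tracked
// capacity *c each step, in the style of the removed helper.
func ensureCapacity(a *[]byte, c *uint, r uint) {
	if *c >= r {
		return
	}
	newSize := *c
	if newSize == 0 {
		newSize = r
	}
	for newSize < r {
		newSize *= 2
	}
	grown := make([]byte, newSize)
	copy(grown, (*a)[:*c])
	*a = grown
	*c = newSize
}

func main() {
	var buf []byte
	var capacity uint
	for _, need := range []uint{1, 3, 9, 100} {
		ensureCapacity(&buf, &capacity, need)
		fmt.Println(need, capacity) // 1 1, 3 4, 9 16, 100 128
	}
}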
*/ - -type metaBlockSplit struct { - literal_split blockSplit - command_split blockSplit - distance_split blockSplit - literal_context_map []uint32 - literal_context_map_size uint - distance_context_map []uint32 - distance_context_map_size uint - literal_histograms []histogramLiteral - literal_histograms_size uint - command_histograms []histogramCommand - command_histograms_size uint - distance_histograms []histogramDistance - distance_histograms_size uint -} - -func initMetaBlockSplit(mb *metaBlockSplit) { - initBlockSplit(&mb.literal_split) - initBlockSplit(&mb.command_split) - initBlockSplit(&mb.distance_split) - mb.literal_context_map = nil - mb.literal_context_map_size = 0 - mb.distance_context_map = nil - mb.distance_context_map_size = 0 - mb.literal_histograms = nil - mb.literal_histograms_size = 0 - mb.command_histograms = nil - mb.command_histograms_size = 0 - mb.distance_histograms = nil - mb.distance_histograms_size = 0 -} - -func destroyMetaBlockSplit(mb *metaBlockSplit) { - destroyBlockSplit(&mb.literal_split) - destroyBlockSplit(&mb.command_split) - destroyBlockSplit(&mb.distance_split) - mb.literal_context_map = nil - mb.distance_context_map = nil - mb.literal_histograms = nil - mb.command_histograms = nil - mb.distance_histograms = nil -} - -func initDistanceParams(params *encoderParams, npostfix uint32, ndirect uint32) { - var dist_params *distanceParams = ¶ms.dist - var alphabet_size uint32 - var max_distance uint32 - - dist_params.distance_postfix_bits = npostfix - dist_params.num_direct_distance_codes = ndirect - - alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), maxDistanceBits)) - max_distance = ndirect + (1 << (maxDistanceBits + npostfix + 2)) - (1 << (npostfix + 2)) - - if params.large_window { - var bound = [maxNpostfix + 1]uint32{0, 4, 12, 28} - var postfix uint32 = 1 << npostfix - alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), largeMaxDistanceBits)) - - /* The maximum distance is set so that no distance symbol used can encode - a distance larger than BROTLI_MAX_ALLOWED_DISTANCE with all - its extra bits set. 
*/ - if ndirect < bound[npostfix] { - max_distance = maxAllowedDistance - (bound[npostfix] - ndirect) - } else if ndirect >= bound[npostfix]+postfix { - max_distance = (3 << 29) - 4 + (ndirect - bound[npostfix]) - } else { - max_distance = maxAllowedDistance - } - } - - dist_params.alphabet_size = alphabet_size - dist_params.max_distance = uint(max_distance) -} - -func recomputeDistancePrefixes(cmds []command, num_commands uint, orig_params *distanceParams, new_params *distanceParams) { - var i uint - - if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes { - return - } - - for i = 0; i < num_commands; i++ { - var cmd *command = &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - prefixEncodeCopyDistance(uint(commandRestoreDistanceCode(cmd, orig_params)), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &cmd.dist_prefix_, &cmd.dist_extra_) - } - } -} - -func computeDistanceCost(cmds []command, num_commands uint, orig_params *distanceParams, new_params *distanceParams, cost *float64) bool { - var i uint - var equal_params bool = false - var dist_prefix uint16 - var dist_extra uint32 - var extra_bits float64 = 0.0 - var histo histogramDistance - histogramClearDistance(&histo) - - if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes { - equal_params = true - } - - for i = 0; i < num_commands; i++ { - var cmd *command = &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - if equal_params { - dist_prefix = cmd.dist_prefix_ - } else { - var distance uint32 = commandRestoreDistanceCode(cmd, orig_params) - if distance > uint32(new_params.max_distance) { - return false - } - - prefixEncodeCopyDistance(uint(distance), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &dist_prefix, &dist_extra) - } - - histogramAddDistance(&histo, uint(dist_prefix)&0x3FF) - extra_bits += float64(dist_prefix >> 10) - } - } - - *cost = populationCostDistance(&histo) + extra_bits - return true -} - -var buildMetaBlock_kMaxNumberOfHistograms uint = 256 - -func buildMetaBlock(ringbuffer []byte, pos uint, mask uint, params *encoderParams, prev_byte byte, prev_byte2 byte, cmds []command, num_commands uint, literal_context_mode int, mb *metaBlockSplit) { - var distance_histograms []histogramDistance - var literal_histograms []histogramLiteral - var literal_context_modes []int = nil - var literal_histograms_size uint - var distance_histograms_size uint - var i uint - var literal_context_multiplier uint = 1 - var npostfix uint32 - var ndirect_msb uint32 = 0 - var check_orig bool = true - var best_dist_cost float64 = 1e99 - var orig_params encoderParams = *params - /* Histogram ids need to fit in one byte. 
*/ - - var new_params encoderParams = *params - - for npostfix = 0; npostfix <= maxNpostfix; npostfix++ { - for ; ndirect_msb < 16; ndirect_msb++ { - var ndirect uint32 = ndirect_msb << npostfix - var skip bool - var dist_cost float64 - initDistanceParams(&new_params, npostfix, ndirect) - if npostfix == orig_params.dist.distance_postfix_bits && ndirect == orig_params.dist.num_direct_distance_codes { - check_orig = false - } - - skip = !computeDistanceCost(cmds, num_commands, &orig_params.dist, &new_params.dist, &dist_cost) - if skip || (dist_cost > best_dist_cost) { - break - } - - best_dist_cost = dist_cost - params.dist = new_params.dist - } - - if ndirect_msb > 0 { - ndirect_msb-- - } - ndirect_msb /= 2 - } - - if check_orig { - var dist_cost float64 - computeDistanceCost(cmds, num_commands, &orig_params.dist, &orig_params.dist, &dist_cost) - if dist_cost < best_dist_cost { - /* NB: currently unused; uncomment when more param tuning is added. */ - /* best_dist_cost = dist_cost; */ - params.dist = orig_params.dist - } - } - - recomputeDistancePrefixes(cmds, num_commands, &orig_params.dist, ¶ms.dist) - - splitBlock(cmds, num_commands, ringbuffer, pos, mask, params, &mb.literal_split, &mb.command_split, &mb.distance_split) - - if !params.disable_literal_context_modeling { - literal_context_multiplier = 1 << literalContextBits - literal_context_modes = make([]int, (mb.literal_split.num_types)) - for i = 0; i < mb.literal_split.num_types; i++ { - literal_context_modes[i] = literal_context_mode - } - } - - literal_histograms_size = mb.literal_split.num_types * literal_context_multiplier - literal_histograms = make([]histogramLiteral, literal_histograms_size) - clearHistogramsLiteral(literal_histograms, literal_histograms_size) - - distance_histograms_size = mb.distance_split.num_types << distanceContextBits - distance_histograms = make([]histogramDistance, distance_histograms_size) - clearHistogramsDistance(distance_histograms, distance_histograms_size) - - assert(mb.command_histograms == nil) - mb.command_histograms_size = mb.command_split.num_types - mb.command_histograms = make([]histogramCommand, (mb.command_histograms_size)) - clearHistogramsCommand(mb.command_histograms, mb.command_histograms_size) - - buildHistogramsWithContext(cmds, num_commands, &mb.literal_split, &mb.command_split, &mb.distance_split, ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_modes, literal_histograms, mb.command_histograms, distance_histograms) - literal_context_modes = nil - - assert(mb.literal_context_map == nil) - mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits - mb.literal_context_map = make([]uint32, (mb.literal_context_map_size)) - - assert(mb.literal_histograms == nil) - mb.literal_histograms_size = mb.literal_context_map_size - mb.literal_histograms = make([]histogramLiteral, (mb.literal_histograms_size)) - - clusterHistogramsLiteral(literal_histograms, literal_histograms_size, buildMetaBlock_kMaxNumberOfHistograms, mb.literal_histograms, &mb.literal_histograms_size, mb.literal_context_map) - literal_histograms = nil - - if params.disable_literal_context_modeling { - /* Distribute assignment to all contexts. 
*/ - for i = mb.literal_split.num_types; i != 0; { - var j uint = 0 - i-- - for ; j < 1< 0 { - var entropy [maxStaticContexts]float64 - var combined_histo []histogramLiteral = make([]histogramLiteral, (2 * num_contexts)) - var combined_entropy [2 * maxStaticContexts]float64 - var diff = [2]float64{0.0} - /* Try merging the set of histograms for the current block type with the - respective set of histograms for the last and second last block types. - Decide over the split based on the total reduction of entropy across - all contexts. */ - - var i uint - for i = 0; i < num_contexts; i++ { - var curr_histo_ix uint = self.curr_histogram_ix_ + i - var j uint - entropy[i] = bitsEntropy(histograms[curr_histo_ix].data_[:], self.alphabet_size_) - for j = 0; j < 2; j++ { - var jx uint = j*num_contexts + i - var last_histogram_ix uint = self.last_histogram_ix_[j] + i - combined_histo[jx] = histograms[curr_histo_ix] - histogramAddHistogramLiteral(&combined_histo[jx], &histograms[last_histogram_ix]) - combined_entropy[jx] = bitsEntropy(combined_histo[jx].data_[0:], self.alphabet_size_) - diff[j] += combined_entropy[jx] - entropy[i] - last_entropy[jx] - } - } - - if split.num_types < self.max_block_types_ && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. */ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = split.num_types * num_contexts - for i = 0; i < num_contexts; i++ { - last_entropy[num_contexts+i] = last_entropy[i] - last_entropy[i] = entropy[i] - } - - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_ += num_contexts - if self.curr_histogram_ix_ < *self.histograms_size_ { - clearHistogramsLiteral(self.histograms_[self.curr_histogram_ix_:], self.num_contexts_) - } - - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - for i = 0; i < num_contexts; i++ { - histograms[self.last_histogram_ix_[0]+i] = combined_histo[num_contexts+i] - last_entropy[num_contexts+i] = last_entropy[i] - last_entropy[i] = combined_entropy[num_contexts+i] - histogramClearLiteral(&histograms[self.curr_histogram_ix_+i]) - } - - self.num_blocks_++ - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - for i = 0; i < num_contexts; i++ { - histograms[self.last_histogram_ix_[0]+i] = combined_histo[i] - last_entropy[i] = combined_entropy[i] - if split.num_types == 1 { - last_entropy[num_contexts+i] = last_entropy[i] - } - - histogramClearLiteral(&histograms[self.curr_histogram_ix_+i]) - } - - self.block_size_ = 0 - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - - combined_histo = nil - } - - if is_final { - *self.histograms_size_ = split.num_types * num_contexts - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current block type and context. 
When the - current block reaches the target size, decides on merging the block. */ -func contextBlockSplitterAddSymbol(self *contextBlockSplitter, symbol uint, context uint) { - histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_+context], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - contextBlockSplitterFinishBlock(self, false) /* is_final = */ - } -} - -func mapStaticContexts(num_contexts uint, static_context_map []uint32, mb *metaBlockSplit) { - var i uint - assert(mb.literal_context_map == nil) - mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits - mb.literal_context_map = make([]uint32, (mb.literal_context_map_size)) - - for i = 0; i < mb.literal_split.num_types; i++ { - var offset uint32 = uint32(i * num_contexts) - var j uint - for j = 0; j < 1<= 128 { - blockSplitterAddSymbolDistance(&dist_blocks, uint(cmd.dist_prefix_)&0x3FF) - } - } - } - - if num_contexts == 1 { - blockSplitterFinishBlockLiteral(&lit_blocks.plain, true) /* is_final = */ - } else { - contextBlockSplitterFinishBlock(&lit_blocks.ctx, true) /* is_final = */ - } - - blockSplitterFinishBlockCommand(&cmd_blocks, true) /* is_final = */ - blockSplitterFinishBlockDistance(&dist_blocks, true) /* is_final = */ - - if num_contexts > 1 { - mapStaticContexts(num_contexts, static_context_map, mb) - } -} - -func buildMetaBlockGreedy(ringbuffer []byte, pos uint, mask uint, prev_byte byte, prev_byte2 byte, literal_context_lut contextLUT, num_contexts uint, static_context_map []uint32, commands []command, n_commands uint, mb *metaBlockSplit) { - if num_contexts == 1 { - buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, 1, nil, commands, n_commands, mb) - } else { - buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, num_contexts, static_context_map, commands, n_commands, mb) - } -} - -func optimizeHistograms(num_distance_codes uint32, mb *metaBlockSplit) { - var good_for_rle [numCommandSymbols]byte - var i uint - for i = 0; i < mb.literal_histograms_size; i++ { - optimizeHuffmanCountsForRLE(256, mb.literal_histograms[i].data_[:], good_for_rle[:]) - } - - for i = 0; i < mb.command_histograms_size; i++ { - optimizeHuffmanCountsForRLE(numCommandSymbols, mb.command_histograms[i].data_[:], good_for_rle[:]) - } - - for i = 0; i < mb.distance_histograms_size; i++ { - optimizeHuffmanCountsForRLE(uint(num_distance_codes), mb.distance_histograms[i].data_[:], good_for_rle[:]) - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_command.go b/vendor/github.com/andybalholm/brotli/metablock_command.go deleted file mode 100644 index d47541c5e6a..00000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_command.go +++ /dev/null @@ -1,162 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). 
- */ -type blockSplitterCommand struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramCommand - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterCommand(self *blockSplitterCommand, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramCommand, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. */ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - assert(*histograms == nil) - *histograms_size = max_num_types - *histograms = make([]histogramCommand, (*histograms_size)) - self.histograms_ = *histograms - - /* Clear only current histogram. */ - histogramClearCommand(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockCommand(self *blockSplitterCommand, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramCommand = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. */ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramCommand - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramCommand(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolCommand(self *blockSplitterCommand, symbol uint) { - histogramAddCommand(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockCommand(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_distance.go b/vendor/github.com/andybalholm/brotli/metablock_distance.go deleted file mode 100644 index 95923127a6d..00000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_distance.go +++ /dev/null @@ -1,162 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). 
- */ -type blockSplitterDistance struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramDistance - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterDistance(self *blockSplitterDistance, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramDistance, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. */ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - assert(*histograms == nil) - *histograms_size = max_num_types - *histograms = make([]histogramDistance, (*histograms_size)) - self.histograms_ = *histograms - - /* Clear only current histogram. */ - histogramClearDistance(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockDistance(self *blockSplitterDistance, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramDistance = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. */ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramDistance - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramDistance(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolDistance(self *blockSplitterDistance, symbol uint) { - histogramAddDistance(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockDistance(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_literal.go b/vendor/github.com/andybalholm/brotli/metablock_literal.go deleted file mode 100644 index d7e8a7c9233..00000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_literal.go +++ /dev/null @@ -1,162 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). 
- */ -type blockSplitterLiteral struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramLiteral - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterLiteral(self *blockSplitterLiteral, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramLiteral, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. */ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - assert(*histograms == nil) - *histograms_size = max_num_types - *histograms = make([]histogramLiteral, (*histograms_size)) - self.histograms_ = *histograms - - /* Clear only current histogram. */ - histogramClearLiteral(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockLiteral(self *blockSplitterLiteral, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramLiteral = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. */ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramLiteral - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramLiteral(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolLiteral(self *blockSplitterLiteral, symbol uint) { - histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockLiteral(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/params.go b/vendor/github.com/andybalholm/brotli/params.go deleted file mode 100644 index 0a4c6875212..00000000000 --- a/vendor/github.com/andybalholm/brotli/params.go +++ /dev/null @@ -1,37 +0,0 @@ -package brotli - -/* Copyright 2017 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Parameters for the Brotli encoder with chosen quality levels. 
*/ -type hasherParams struct { - type_ int - bucket_bits int - block_bits int - hash_len int - num_last_distances_to_check int -} - -type distanceParams struct { - distance_postfix_bits uint32 - num_direct_distance_codes uint32 - alphabet_size uint32 - max_distance uint -} - -/* Encoding parameters */ -type encoderParams struct { - mode int - quality int - lgwin uint - lgblock int - size_hint uint - disable_literal_context_modeling bool - large_window bool - hasher hasherParams - dist distanceParams - dictionary encoderDictionary -} diff --git a/vendor/github.com/andybalholm/brotli/platform.go b/vendor/github.com/andybalholm/brotli/platform.go deleted file mode 100644 index 4ebfb1528ba..00000000000 --- a/vendor/github.com/andybalholm/brotli/platform.go +++ /dev/null @@ -1,103 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func brotli_min_double(a float64, b float64) float64 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_double(a float64, b float64) float64 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_float(a float32, b float32) float32 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_float(a float32, b float32) float32 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_int(a int, b int) int { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_int(a int, b int) int { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_size_t(a uint, b uint) uint { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_size_t(a uint, b uint) uint { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_uint32_t(a uint32, b uint32) uint32 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_uint32_t(a uint32, b uint32) uint32 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_uint8_t(a byte, b byte) byte { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_uint8_t(a byte, b byte) byte { - if a > b { - return a - } else { - return b - } -} diff --git a/vendor/github.com/andybalholm/brotli/prefix.go b/vendor/github.com/andybalholm/brotli/prefix.go deleted file mode 100644 index 484df0d61ec..00000000000 --- a/vendor/github.com/andybalholm/brotli/prefix.go +++ /dev/null @@ -1,30 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions for encoding of integers into prefix codes the amount of extra - bits, and the actual values of the extra bits. */ - -/* Here distance_code is an intermediate code, i.e. one of the special codes or - the actual distance increased by BROTLI_NUM_DISTANCE_SHORT_CODES - 1. 
*/ -func prefixEncodeCopyDistance(distance_code uint, num_direct_codes uint, postfix_bits uint, code *uint16, extra_bits *uint32) { - if distance_code < numDistanceShortCodes+num_direct_codes { - *code = uint16(distance_code) - *extra_bits = 0 - return - } else { - var dist uint = (uint(1) << (postfix_bits + 2)) + (distance_code - numDistanceShortCodes - num_direct_codes) - var bucket uint = uint(log2FloorNonZero(dist) - 1) - var postfix_mask uint = (1 << postfix_bits) - 1 - var postfix uint = dist & postfix_mask - var prefix uint = (dist >> bucket) & 1 - var offset uint = (2 + prefix) << bucket - var nbits uint = bucket - postfix_bits - *code = uint16(nbits<<10 | (numDistanceShortCodes + num_direct_codes + ((2*(nbits-1) + prefix) << postfix_bits) + postfix)) - *extra_bits = uint32((dist - offset) >> postfix_bits) - } -} diff --git a/vendor/github.com/andybalholm/brotli/prefix_dec.go b/vendor/github.com/andybalholm/brotli/prefix_dec.go deleted file mode 100644 index 183f0d53fed..00000000000 --- a/vendor/github.com/andybalholm/brotli/prefix_dec.go +++ /dev/null @@ -1,723 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -type cmdLutElement struct { - insert_len_extra_bits byte - copy_len_extra_bits byte - distance_code int8 - context byte - insert_len_offset uint16 - copy_len_offset uint16 -} - -var kCmdLut = [numCommandSymbols]cmdLutElement{ - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0000, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0000, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0000, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0001, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0001, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0001, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0002, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0002, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0002, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0003, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0003, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0003, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0004, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0004, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0004, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 
0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0005, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0005, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0005, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0009}, - cmdLutElement{0x01, 0x00, 0, 0x00, 0x0006, 0x0002}, - cmdLutElement{0x01, 0x00, 0, 0x01, 0x0006, 0x0003}, - cmdLutElement{0x01, 0x00, 0, 0x02, 0x0006, 0x0004}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0005}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0006}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0007}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0008}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0009}, - cmdLutElement{0x01, 0x00, 0, 0x00, 0x0008, 0x0002}, - cmdLutElement{0x01, 0x00, 0, 0x01, 0x0008, 0x0003}, - cmdLutElement{0x01, 0x00, 0, 0x02, 0x0008, 0x0004}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0005}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0006}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0007}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0008}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0009}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0004, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 
0x03, 0x0004, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0036}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000a}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000c}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x000e}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x0012}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x0016}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x001e}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0026}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0036}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000a}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000c}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x000e}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x0012}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x0016}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x001e}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0026}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0036}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0000, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0000, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0000, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0001, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0001, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0001, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0002, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0002, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0002, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0003, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0003, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0003, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0004, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0004, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0004, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 
0x00, 0x0005, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0005, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0005, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0009}, - cmdLutElement{0x01, 0x00, -1, 0x00, 0x0006, 0x0002}, - cmdLutElement{0x01, 0x00, -1, 0x01, 0x0006, 0x0003}, - cmdLutElement{0x01, 0x00, -1, 0x02, 0x0006, 0x0004}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0005}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0006}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0007}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0008}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0009}, - cmdLutElement{0x01, 0x00, -1, 0x00, 0x0008, 0x0002}, - cmdLutElement{0x01, 0x00, -1, 0x01, 0x0008, 0x0003}, - cmdLutElement{0x01, 0x00, -1, 0x02, 0x0008, 0x0004}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0005}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0006}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0007}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0008}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0009}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0002, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0002, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000a}, - 
cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0036}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000a}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000c}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x000e}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x0012}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x0016}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x001e}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0026}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0036}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000a}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000c}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0008, 0x000e}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0008, 0x0012}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x0016}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x001e}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0026}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0036}, - cmdLutElement{0x02, 0x00, -1, 0x00, 0x000a, 0x0002}, - cmdLutElement{0x02, 0x00, -1, 0x01, 0x000a, 0x0003}, - cmdLutElement{0x02, 0x00, -1, 0x02, 0x000a, 0x0004}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0005}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0006}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0007}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0008}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0009}, - cmdLutElement{0x02, 0x00, -1, 0x00, 0x000e, 0x0002}, - cmdLutElement{0x02, 0x00, -1, 0x01, 0x000e, 0x0003}, - cmdLutElement{0x02, 0x00, -1, 0x02, 0x000e, 0x0004}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0005}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0006}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0007}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0008}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0009}, - cmdLutElement{0x03, 0x00, -1, 0x00, 0x0012, 0x0002}, - cmdLutElement{0x03, 0x00, -1, 0x01, 0x0012, 0x0003}, - cmdLutElement{0x03, 0x00, -1, 0x02, 0x0012, 0x0004}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0005}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0006}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0007}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0008}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0009}, - cmdLutElement{0x03, 0x00, -1, 0x00, 0x001a, 0x0002}, - cmdLutElement{0x03, 0x00, -1, 0x01, 0x001a, 0x0003}, - cmdLutElement{0x03, 0x00, -1, 0x02, 0x001a, 0x0004}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0005}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0006}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0007}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0008}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0009}, - cmdLutElement{0x04, 0x00, -1, 0x00, 0x0022, 0x0002}, - cmdLutElement{0x04, 0x00, -1, 0x01, 0x0022, 0x0003}, - cmdLutElement{0x04, 0x00, -1, 0x02, 0x0022, 0x0004}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0005}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0006}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0007}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0008}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0009}, - cmdLutElement{0x04, 0x00, -1, 0x00, 0x0032, 0x0002}, - cmdLutElement{0x04, 0x00, -1, 
0x01, 0x0032, 0x0003}, - cmdLutElement{0x04, 0x00, -1, 0x02, 0x0032, 0x0004}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0005}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0006}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0007}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0008}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0009}, - cmdLutElement{0x05, 0x00, -1, 0x00, 0x0042, 0x0002}, - cmdLutElement{0x05, 0x00, -1, 0x01, 0x0042, 0x0003}, - cmdLutElement{0x05, 0x00, -1, 0x02, 0x0042, 0x0004}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0005}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0006}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0007}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0008}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0009}, - cmdLutElement{0x05, 0x00, -1, 0x00, 0x0062, 0x0002}, - cmdLutElement{0x05, 0x00, -1, 0x01, 0x0062, 0x0003}, - cmdLutElement{0x05, 0x00, -1, 0x02, 0x0062, 0x0004}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0005}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0006}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0007}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0008}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0009}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000a}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000c}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000a, 0x000e}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000a, 0x0012}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x0016}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x001e}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0026}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0036}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000a}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000c}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x000e}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x0012}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x0016}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x001e}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0026}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0036}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000a}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000c}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x000e}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x0012}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x0016}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x001e}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0026}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0036}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000a}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000c}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x000e}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x0012}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x0016}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x001e}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0026}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0036}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000a}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000c}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x000e}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x0012}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x0016}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x001e}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0022, 0x0026}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0022, 0x0036}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000a}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000c}, - 
cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x000e}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x0012}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x0016}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x001e}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0026}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0036}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000a}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000c}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x000e}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x0012}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x0016}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x001e}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0026}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0036}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000a}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000c}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x000e}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x0012}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x0016}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x001e}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0026}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0036}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0000, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0000, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0000, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0000, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0000, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0000, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0001, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0001, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0001, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0001, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0001, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0001, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0002, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0002, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0002, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0002, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0002, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0002, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0003, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0003, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0003, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0003, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0003, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0003, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0004, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0004, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0004, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0004, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0004, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0004, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 
0x03, 0x0005, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0005, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0005, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0005, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0005, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0005, 0x0846}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0006, 0x0046}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0006, 0x0066}, - cmdLutElement{0x01, 0x06, -1, 0x03, 0x0006, 0x0086}, - cmdLutElement{0x01, 0x07, -1, 0x03, 0x0006, 0x00c6}, - cmdLutElement{0x01, 0x08, -1, 0x03, 0x0006, 0x0146}, - cmdLutElement{0x01, 0x09, -1, 0x03, 0x0006, 0x0246}, - cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0006, 0x0446}, - cmdLutElement{0x01, 0x18, -1, 0x03, 0x0006, 0x0846}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0046}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0066}, - cmdLutElement{0x01, 0x06, -1, 0x03, 0x0008, 0x0086}, - cmdLutElement{0x01, 0x07, -1, 0x03, 0x0008, 0x00c6}, - cmdLutElement{0x01, 0x08, -1, 0x03, 0x0008, 0x0146}, - cmdLutElement{0x01, 0x09, -1, 0x03, 0x0008, 0x0246}, - cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0008, 0x0446}, - cmdLutElement{0x01, 0x18, -1, 0x03, 0x0008, 0x0846}, - cmdLutElement{0x06, 0x00, -1, 0x00, 0x0082, 0x0002}, - cmdLutElement{0x06, 0x00, -1, 0x01, 0x0082, 0x0003}, - cmdLutElement{0x06, 0x00, -1, 0x02, 0x0082, 0x0004}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0005}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0006}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0007}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0008}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0009}, - cmdLutElement{0x07, 0x00, -1, 0x00, 0x00c2, 0x0002}, - cmdLutElement{0x07, 0x00, -1, 0x01, 0x00c2, 0x0003}, - cmdLutElement{0x07, 0x00, -1, 0x02, 0x00c2, 0x0004}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0005}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0006}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0007}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0008}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0009}, - cmdLutElement{0x08, 0x00, -1, 0x00, 0x0142, 0x0002}, - cmdLutElement{0x08, 0x00, -1, 0x01, 0x0142, 0x0003}, - cmdLutElement{0x08, 0x00, -1, 0x02, 0x0142, 0x0004}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0005}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0006}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0007}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0008}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0009}, - cmdLutElement{0x09, 0x00, -1, 0x00, 0x0242, 0x0002}, - cmdLutElement{0x09, 0x00, -1, 0x01, 0x0242, 0x0003}, - cmdLutElement{0x09, 0x00, -1, 0x02, 0x0242, 0x0004}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0005}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0006}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0007}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0008}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0009}, - cmdLutElement{0x0a, 0x00, -1, 0x00, 0x0442, 0x0002}, - cmdLutElement{0x0a, 0x00, -1, 0x01, 0x0442, 0x0003}, - cmdLutElement{0x0a, 0x00, -1, 0x02, 0x0442, 0x0004}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0005}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0006}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0007}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0008}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0009}, - cmdLutElement{0x0c, 0x00, -1, 0x00, 0x0842, 0x0002}, - cmdLutElement{0x0c, 0x00, -1, 0x01, 0x0842, 0x0003}, - cmdLutElement{0x0c, 0x00, -1, 0x02, 0x0842, 0x0004}, - 
cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0005}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0006}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0007}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0008}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0009}, - cmdLutElement{0x0e, 0x00, -1, 0x00, 0x1842, 0x0002}, - cmdLutElement{0x0e, 0x00, -1, 0x01, 0x1842, 0x0003}, - cmdLutElement{0x0e, 0x00, -1, 0x02, 0x1842, 0x0004}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0005}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0006}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0007}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0008}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0009}, - cmdLutElement{0x18, 0x00, -1, 0x00, 0x5842, 0x0002}, - cmdLutElement{0x18, 0x00, -1, 0x01, 0x5842, 0x0003}, - cmdLutElement{0x18, 0x00, -1, 0x02, 0x5842, 0x0004}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0005}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0006}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0007}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0008}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0009}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0046}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0066}, - cmdLutElement{0x02, 0x06, -1, 0x03, 0x000a, 0x0086}, - cmdLutElement{0x02, 0x07, -1, 0x03, 0x000a, 0x00c6}, - cmdLutElement{0x02, 0x08, -1, 0x03, 0x000a, 0x0146}, - cmdLutElement{0x02, 0x09, -1, 0x03, 0x000a, 0x0246}, - cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000a, 0x0446}, - cmdLutElement{0x02, 0x18, -1, 0x03, 0x000a, 0x0846}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0046}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0066}, - cmdLutElement{0x02, 0x06, -1, 0x03, 0x000e, 0x0086}, - cmdLutElement{0x02, 0x07, -1, 0x03, 0x000e, 0x00c6}, - cmdLutElement{0x02, 0x08, -1, 0x03, 0x000e, 0x0146}, - cmdLutElement{0x02, 0x09, -1, 0x03, 0x000e, 0x0246}, - cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000e, 0x0446}, - cmdLutElement{0x02, 0x18, -1, 0x03, 0x000e, 0x0846}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0046}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0066}, - cmdLutElement{0x03, 0x06, -1, 0x03, 0x0012, 0x0086}, - cmdLutElement{0x03, 0x07, -1, 0x03, 0x0012, 0x00c6}, - cmdLutElement{0x03, 0x08, -1, 0x03, 0x0012, 0x0146}, - cmdLutElement{0x03, 0x09, -1, 0x03, 0x0012, 0x0246}, - cmdLutElement{0x03, 0x0a, -1, 0x03, 0x0012, 0x0446}, - cmdLutElement{0x03, 0x18, -1, 0x03, 0x0012, 0x0846}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0046}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0066}, - cmdLutElement{0x03, 0x06, -1, 0x03, 0x001a, 0x0086}, - cmdLutElement{0x03, 0x07, -1, 0x03, 0x001a, 0x00c6}, - cmdLutElement{0x03, 0x08, -1, 0x03, 0x001a, 0x0146}, - cmdLutElement{0x03, 0x09, -1, 0x03, 0x001a, 0x0246}, - cmdLutElement{0x03, 0x0a, -1, 0x03, 0x001a, 0x0446}, - cmdLutElement{0x03, 0x18, -1, 0x03, 0x001a, 0x0846}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0046}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0066}, - cmdLutElement{0x04, 0x06, -1, 0x03, 0x0022, 0x0086}, - cmdLutElement{0x04, 0x07, -1, 0x03, 0x0022, 0x00c6}, - cmdLutElement{0x04, 0x08, -1, 0x03, 0x0022, 0x0146}, - cmdLutElement{0x04, 0x09, -1, 0x03, 0x0022, 0x0246}, - cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0022, 0x0446}, - cmdLutElement{0x04, 0x18, -1, 0x03, 0x0022, 0x0846}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0046}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0066}, - cmdLutElement{0x04, 0x06, -1, 0x03, 0x0032, 0x0086}, - cmdLutElement{0x04, 0x07, -1, 
0x03, 0x0032, 0x00c6}, - cmdLutElement{0x04, 0x08, -1, 0x03, 0x0032, 0x0146}, - cmdLutElement{0x04, 0x09, -1, 0x03, 0x0032, 0x0246}, - cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0032, 0x0446}, - cmdLutElement{0x04, 0x18, -1, 0x03, 0x0032, 0x0846}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0046}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0066}, - cmdLutElement{0x05, 0x06, -1, 0x03, 0x0042, 0x0086}, - cmdLutElement{0x05, 0x07, -1, 0x03, 0x0042, 0x00c6}, - cmdLutElement{0x05, 0x08, -1, 0x03, 0x0042, 0x0146}, - cmdLutElement{0x05, 0x09, -1, 0x03, 0x0042, 0x0246}, - cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0042, 0x0446}, - cmdLutElement{0x05, 0x18, -1, 0x03, 0x0042, 0x0846}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0046}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0066}, - cmdLutElement{0x05, 0x06, -1, 0x03, 0x0062, 0x0086}, - cmdLutElement{0x05, 0x07, -1, 0x03, 0x0062, 0x00c6}, - cmdLutElement{0x05, 0x08, -1, 0x03, 0x0062, 0x0146}, - cmdLutElement{0x05, 0x09, -1, 0x03, 0x0062, 0x0246}, - cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0062, 0x0446}, - cmdLutElement{0x05, 0x18, -1, 0x03, 0x0062, 0x0846}, - cmdLutElement{0x06, 0x01, -1, 0x03, 0x0082, 0x000a}, - cmdLutElement{0x06, 0x01, -1, 0x03, 0x0082, 0x000c}, - cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x000e}, - cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x0012}, - cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x0016}, - cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x001e}, - cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0026}, - cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0036}, - cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000a}, - cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000c}, - cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x000e}, - cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x0012}, - cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x0016}, - cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x001e}, - cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0026}, - cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0036}, - cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000a}, - cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000c}, - cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x000e}, - cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x0012}, - cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x0016}, - cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x001e}, - cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0026}, - cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0036}, - cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000a}, - cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000c}, - cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x000e}, - cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x0012}, - cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x0016}, - cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x001e}, - cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0026}, - cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0036}, - cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000a}, - cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000c}, - cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x000e}, - cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x0012}, - cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x0016}, - cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x001e}, - cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0026}, - cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0036}, - cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000a}, - cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000c}, - cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x000e}, - cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x0012}, - 
cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x0016}, - cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x001e}, - cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0026}, - cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0036}, - cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000a}, - cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000c}, - cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x000e}, - cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x0012}, - cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x0016}, - cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x001e}, - cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0026}, - cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0036}, - cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000a}, - cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000c}, - cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x000e}, - cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x0012}, - cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x0016}, - cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x001e}, - cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0026}, - cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0036}, - cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0046}, - cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0066}, - cmdLutElement{0x06, 0x06, -1, 0x03, 0x0082, 0x0086}, - cmdLutElement{0x06, 0x07, -1, 0x03, 0x0082, 0x00c6}, - cmdLutElement{0x06, 0x08, -1, 0x03, 0x0082, 0x0146}, - cmdLutElement{0x06, 0x09, -1, 0x03, 0x0082, 0x0246}, - cmdLutElement{0x06, 0x0a, -1, 0x03, 0x0082, 0x0446}, - cmdLutElement{0x06, 0x18, -1, 0x03, 0x0082, 0x0846}, - cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0046}, - cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0066}, - cmdLutElement{0x07, 0x06, -1, 0x03, 0x00c2, 0x0086}, - cmdLutElement{0x07, 0x07, -1, 0x03, 0x00c2, 0x00c6}, - cmdLutElement{0x07, 0x08, -1, 0x03, 0x00c2, 0x0146}, - cmdLutElement{0x07, 0x09, -1, 0x03, 0x00c2, 0x0246}, - cmdLutElement{0x07, 0x0a, -1, 0x03, 0x00c2, 0x0446}, - cmdLutElement{0x07, 0x18, -1, 0x03, 0x00c2, 0x0846}, - cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0046}, - cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0066}, - cmdLutElement{0x08, 0x06, -1, 0x03, 0x0142, 0x0086}, - cmdLutElement{0x08, 0x07, -1, 0x03, 0x0142, 0x00c6}, - cmdLutElement{0x08, 0x08, -1, 0x03, 0x0142, 0x0146}, - cmdLutElement{0x08, 0x09, -1, 0x03, 0x0142, 0x0246}, - cmdLutElement{0x08, 0x0a, -1, 0x03, 0x0142, 0x0446}, - cmdLutElement{0x08, 0x18, -1, 0x03, 0x0142, 0x0846}, - cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0046}, - cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0066}, - cmdLutElement{0x09, 0x06, -1, 0x03, 0x0242, 0x0086}, - cmdLutElement{0x09, 0x07, -1, 0x03, 0x0242, 0x00c6}, - cmdLutElement{0x09, 0x08, -1, 0x03, 0x0242, 0x0146}, - cmdLutElement{0x09, 0x09, -1, 0x03, 0x0242, 0x0246}, - cmdLutElement{0x09, 0x0a, -1, 0x03, 0x0242, 0x0446}, - cmdLutElement{0x09, 0x18, -1, 0x03, 0x0242, 0x0846}, - cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0046}, - cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0066}, - cmdLutElement{0x0a, 0x06, -1, 0x03, 0x0442, 0x0086}, - cmdLutElement{0x0a, 0x07, -1, 0x03, 0x0442, 0x00c6}, - cmdLutElement{0x0a, 0x08, -1, 0x03, 0x0442, 0x0146}, - cmdLutElement{0x0a, 0x09, -1, 0x03, 0x0442, 0x0246}, - cmdLutElement{0x0a, 0x0a, -1, 0x03, 0x0442, 0x0446}, - cmdLutElement{0x0a, 0x18, -1, 0x03, 0x0442, 0x0846}, - cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0046}, - cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0066}, - cmdLutElement{0x0c, 0x06, -1, 0x03, 0x0842, 0x0086}, - cmdLutElement{0x0c, 0x07, -1, 0x03, 0x0842, 0x00c6}, - cmdLutElement{0x0c, 0x08, -1, 
0x03, 0x0842, 0x0146}, - cmdLutElement{0x0c, 0x09, -1, 0x03, 0x0842, 0x0246}, - cmdLutElement{0x0c, 0x0a, -1, 0x03, 0x0842, 0x0446}, - cmdLutElement{0x0c, 0x18, -1, 0x03, 0x0842, 0x0846}, - cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0046}, - cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0066}, - cmdLutElement{0x0e, 0x06, -1, 0x03, 0x1842, 0x0086}, - cmdLutElement{0x0e, 0x07, -1, 0x03, 0x1842, 0x00c6}, - cmdLutElement{0x0e, 0x08, -1, 0x03, 0x1842, 0x0146}, - cmdLutElement{0x0e, 0x09, -1, 0x03, 0x1842, 0x0246}, - cmdLutElement{0x0e, 0x0a, -1, 0x03, 0x1842, 0x0446}, - cmdLutElement{0x0e, 0x18, -1, 0x03, 0x1842, 0x0846}, - cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0046}, - cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0066}, - cmdLutElement{0x18, 0x06, -1, 0x03, 0x5842, 0x0086}, - cmdLutElement{0x18, 0x07, -1, 0x03, 0x5842, 0x00c6}, - cmdLutElement{0x18, 0x08, -1, 0x03, 0x5842, 0x0146}, - cmdLutElement{0x18, 0x09, -1, 0x03, 0x5842, 0x0246}, - cmdLutElement{0x18, 0x0a, -1, 0x03, 0x5842, 0x0446}, - cmdLutElement{0x18, 0x18, -1, 0x03, 0x5842, 0x0846}, -} diff --git a/vendor/github.com/andybalholm/brotli/quality.go b/vendor/github.com/andybalholm/brotli/quality.go deleted file mode 100644 index 49709a38239..00000000000 --- a/vendor/github.com/andybalholm/brotli/quality.go +++ /dev/null @@ -1,196 +0,0 @@ -package brotli - -const fastOnePassCompressionQuality = 0 - -const fastTwoPassCompressionQuality = 1 - -const zopflificationQuality = 10 - -const hqZopflificationQuality = 11 - -const maxQualityForStaticEntropyCodes = 2 - -const minQualityForBlockSplit = 4 - -const minQualityForNonzeroDistanceParams = 4 - -const minQualityForOptimizeHistograms = 4 - -const minQualityForExtensiveReferenceSearch = 5 - -const minQualityForContextModeling = 5 - -const minQualityForHqContextModeling = 7 - -const minQualityForHqBlockSplitting = 10 - -/* For quality below MIN_QUALITY_FOR_BLOCK_SPLIT there is no block splitting, - so we buffer at most this much literals and commands. */ -const maxNumDelayedSymbols = 0x2FFF - -/* Returns hash-table size for quality levels 0 and 1. */ -func maxHashTableSize(quality int) uint { - if quality == fastOnePassCompressionQuality { - return 1 << 15 - } else { - return 1 << 17 - } -} - -/* The maximum length for which the zopflification uses distinct distances. */ -const maxZopfliLenQuality10 = 150 - -const maxZopfliLenQuality11 = 325 - -/* Do not thoroughly search when a long copy is found. */ -const longCopyQuickStep = 16384 - -func maxZopfliLen(params *encoderParams) uint { - if params.quality <= 10 { - return maxZopfliLenQuality10 - } else { - return maxZopfliLenQuality11 - } -} - -/* Number of best candidates to evaluate to expand Zopfli chain. */ -func maxZopfliCandidates(params *encoderParams) uint { - if params.quality <= 10 { - return 1 - } else { - return 5 - } -} - -func sanitizeParams(params *encoderParams) { - params.quality = brotli_min_int(maxQuality, brotli_max_int(minQuality, params.quality)) - if params.quality <= maxQualityForStaticEntropyCodes { - params.large_window = false - } - - if params.lgwin < minWindowBits { - params.lgwin = minWindowBits - } else { - var max_lgwin int - if params.large_window { - max_lgwin = largeMaxWindowBits - } else { - max_lgwin = maxWindowBits - } - if params.lgwin > uint(max_lgwin) { - params.lgwin = uint(max_lgwin) - } - } -} - -/* Returns optimized lg_block value. 
*/ -func computeLgBlock(params *encoderParams) int { - var lgblock int = params.lgblock - if params.quality == fastOnePassCompressionQuality || params.quality == fastTwoPassCompressionQuality { - lgblock = int(params.lgwin) - } else if params.quality < minQualityForBlockSplit { - lgblock = 14 - } else if lgblock == 0 { - lgblock = 16 - if params.quality >= 9 && params.lgwin > uint(lgblock) { - lgblock = brotli_min_int(18, int(params.lgwin)) - } - } else { - lgblock = brotli_min_int(maxInputBlockBits, brotli_max_int(minInputBlockBits, lgblock)) - } - - return lgblock -} - -/* Returns log2 of the size of main ring buffer area. - Allocate at least lgwin + 1 bits for the ring buffer so that the newly - added block fits there completely and we still get lgwin bits and at least - read_block_size_bits + 1 bits because the copy tail length needs to be - smaller than ring-buffer size. */ -func computeRbBits(params *encoderParams) int { - return 1 + brotli_max_int(int(params.lgwin), params.lgblock) -} - -func maxMetablockSize(params *encoderParams) uint { - var bits int = brotli_min_int(computeRbBits(params), maxInputBlockBits) - return uint(1) << uint(bits) -} - -/* When searching for backward references and have not seen matches for a long - time, we can skip some match lookups. Unsuccessful match lookups are very - expensive and this kind of a heuristic speeds up compression quite a lot. - At first 8 byte strides are taken and every second byte is put to hasher. - After 4x more literals stride by 16 bytes, every put 4-th byte to hasher. - Applied only to qualities 2 to 9. */ -func literalSpreeLengthForSparseSearch(params *encoderParams) uint { - if params.quality < 9 { - return 64 - } else { - return 512 - } -} - -func chooseHasher(params *encoderParams, hparams *hasherParams) { - if params.quality > 9 { - hparams.type_ = 10 - } else if params.quality == 4 && params.size_hint >= 1<<20 { - hparams.type_ = 54 - } else if params.quality < 5 { - hparams.type_ = params.quality - } else if params.lgwin <= 16 { - if params.quality < 7 { - hparams.type_ = 40 - } else if params.quality < 9 { - hparams.type_ = 41 - } else { - hparams.type_ = 42 - } - } else if params.size_hint >= 1<<20 && params.lgwin >= 19 { - hparams.type_ = 6 - hparams.block_bits = params.quality - 1 - hparams.bucket_bits = 15 - hparams.hash_len = 5 - if params.quality < 7 { - hparams.num_last_distances_to_check = 4 - } else if params.quality < 9 { - hparams.num_last_distances_to_check = 10 - } else { - hparams.num_last_distances_to_check = 16 - } - } else { - hparams.type_ = 5 - hparams.block_bits = params.quality - 1 - if params.quality < 7 { - hparams.bucket_bits = 14 - } else { - hparams.bucket_bits = 15 - } - if params.quality < 7 { - hparams.num_last_distances_to_check = 4 - } else if params.quality < 9 { - hparams.num_last_distances_to_check = 10 - } else { - hparams.num_last_distances_to_check = 16 - } - } - - if params.lgwin > 24 { - /* Different hashers for large window brotli: not for qualities <= 2, - these are too fast for large window. Not for qualities >= 10: their - hasher already works well with large window. So the changes are: - H3 --> H35: for quality 3. - H54 --> H55: for quality 4 with size hint > 1MB - H6 --> H65: for qualities 5, 6, 7, 8, 9. 
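quality.go, removed in the hunk above, derives the encoder's working limits from the requested quality. As a reference sketch (not the deleted implementation), the clamping rules of sanitizeParams can be restated standalone; the numeric bounds are assumptions mirroring brotli's documented limits (quality 0..11, window bits 10..24, or 30 with the large-window extension), since those constants are defined in files outside this hunk.

package main

import "fmt"

const (
	minQuality         = 0  // assumed: defined elsewhere in the brotli package
	maxQuality         = 11 // assumed
	minWindowBits      = 10 // assumed
	maxWindowBits      = 24 // assumed
	largeMaxWindowBits = 30 // assumed
)

type encParams struct {
	quality     int
	lgwin       uint
	largeWindow bool
}

// sanitize clamps quality and lgwin the way sanitizeParams does above:
// qualities at or below the static-entropy threshold (2) never use the
// large window, and lgwin is clamped to the allowed window range.
func sanitize(p *encParams) {
	if p.quality < minQuality {
		p.quality = minQuality
	}
	if p.quality > maxQuality {
		p.quality = maxQuality
	}
	if p.quality <= 2 {
		p.largeWindow = false
	}
	maxLgwin := uint(maxWindowBits)
	if p.largeWindow {
		maxLgwin = largeMaxWindowBits
	}
	if p.lgwin < minWindowBits {
		p.lgwin = minWindowBits
	} else if p.lgwin > maxLgwin {
		p.lgwin = maxLgwin
	}
}

func main() {
	p := encParams{quality: 99, lgwin: 99}
	sanitize(&p)
	fmt.Println(p.quality, p.lgwin) // 11 24
}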
*/ - if hparams.type_ == 3 { - hparams.type_ = 35 - } - - if hparams.type_ == 54 { - hparams.type_ = 55 - } - - if hparams.type_ == 6 { - hparams.type_ = 65 - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/reader.go b/vendor/github.com/andybalholm/brotli/reader.go deleted file mode 100644 index 5c795e6e9ec..00000000000 --- a/vendor/github.com/andybalholm/brotli/reader.go +++ /dev/null @@ -1,100 +0,0 @@ -package brotli - -import ( - "errors" - "io" -) - -type decodeError int - -func (err decodeError) Error() string { - return "brotli: " + string(decoderErrorString(int(err))) -} - -var errExcessiveInput = errors.New("brotli: excessive input") -var errInvalidState = errors.New("brotli: invalid state") - -// readBufSize is a "good" buffer size that avoids excessive round-trips -// between C and Go but doesn't waste too much memory on buffering. -// It is arbitrarily chosen to be equal to the constant used in io.Copy. -const readBufSize = 32 * 1024 - -// NewReader creates a new Reader reading the given reader. -func NewReader(src io.Reader) *Reader { - r := new(Reader) - r.Reset(src) - return r -} - -// Reset discards the Reader's state and makes it equivalent to the result of -// its original state from NewReader, but writing to src instead. -// This permits reusing a Reader rather than allocating a new one. -// Error is always nil -func (r *Reader) Reset(src io.Reader) error { - decoderStateInit(r) - r.src = src - r.buf = make([]byte, readBufSize) - return nil -} - -func (r *Reader) Read(p []byte) (n int, err error) { - if !decoderHasMoreOutput(r) && len(r.in) == 0 { - m, readErr := r.src.Read(r.buf) - if m == 0 { - // If readErr is `nil`, we just proxy underlying stream behavior. - return 0, readErr - } - r.in = r.buf[:m] - } - - if len(p) == 0 { - return 0, nil - } - - for { - var written uint - in_len := uint(len(r.in)) - out_len := uint(len(p)) - in_remaining := in_len - out_remaining := out_len - result := decoderDecompressStream(r, &in_remaining, &r.in, &out_remaining, &p) - written = out_len - out_remaining - n = int(written) - - switch result { - case decoderResultSuccess: - if len(r.in) > 0 { - return n, errExcessiveInput - } - return n, nil - case decoderResultError: - return n, decodeError(decoderGetErrorCode(r)) - case decoderResultNeedsMoreOutput: - if n == 0 { - return 0, io.ErrShortBuffer - } - return n, nil - case decoderNeedsMoreInput: - } - - if len(r.in) != 0 { - return 0, errInvalidState - } - - // Calling r.src.Read may block. Don't block if we have data to return. - if n > 0 { - return n, nil - } - - // Top off the buffer. - encN, err := r.src.Read(r.buf) - if encN == 0 { - // Not enough data to complete decoding. - if err == io.EOF { - return 0, io.ErrUnexpectedEOF - } - return 0, err - } - r.in = r.buf[:encN] - } -} diff --git a/vendor/github.com/andybalholm/brotli/ringbuffer.go b/vendor/github.com/andybalholm/brotli/ringbuffer.go deleted file mode 100644 index 693a3f65d3a..00000000000 --- a/vendor/github.com/andybalholm/brotli/ringbuffer.go +++ /dev/null @@ -1,132 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A ringBuffer(window_bits, tail_bits) contains `1 << window_bits' bytes of - data in a circular manner: writing a byte writes it to: - `position() % (1 << window_bits)'. 
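reader.go, deleted here, is the streaming decoder behind the package's public Reader type. The vendored copy goes away but the module remains a dependency, so a quick round trip through the public API is a convenient sanity check that nothing user-visible changes; NewWriter and NewReader are the package's documented entry points, the payload is arbitrary.

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"

	"github.com/andybalholm/brotli"
)

func main() {
	// Compress a payload with the package's Writer...
	var buf bytes.Buffer
	w := brotli.NewWriter(&buf)
	if _, err := w.Write([]byte("hello, tyk")); err != nil {
		log.Fatal(err)
	}
	if err := w.Close(); err != nil {
		log.Fatal(err)
	}

	// ...and decompress it again with the Reader that reader.go implements.
	r := brotli.NewReader(&buf)
	out, err := ioutil.ReadAll(r)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s\n", out) // hello, tyk
}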
- For convenience, the ringBuffer array contains another copy of the - first `1 << tail_bits' bytes: - buffer_[i] == buffer_[i + (1 << window_bits)], if i < (1 << tail_bits), - and another copy of the last two bytes: - buffer_[-1] == buffer_[(1 << window_bits) - 1] and - buffer_[-2] == buffer_[(1 << window_bits) - 2]. */ -type ringBuffer struct { - size_ uint32 - mask_ uint32 - tail_size_ uint32 - total_size_ uint32 - cur_size_ uint32 - pos_ uint32 - data_ []byte - buffer_ []byte -} - -func ringBufferInit(rb *ringBuffer) { - rb.cur_size_ = 0 - rb.pos_ = 0 - rb.data_ = nil - rb.buffer_ = nil -} - -func ringBufferSetup(params *encoderParams, rb *ringBuffer) { - var window_bits int = computeRbBits(params) - var tail_bits int = params.lgblock - *(*uint32)(&rb.size_) = 1 << uint(window_bits) - *(*uint32)(&rb.mask_) = (1 << uint(window_bits)) - 1 - *(*uint32)(&rb.tail_size_) = 1 << uint(tail_bits) - *(*uint32)(&rb.total_size_) = rb.size_ + rb.tail_size_ -} - -const kSlackForEightByteHashingEverywhere uint = 7 - -/* Allocates or re-allocates data_ to the given length + plus some slack - region before and after. Fills the slack regions with zeros. */ -func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) { - var new_data []byte = make([]byte, (2 + uint(buflen) + kSlackForEightByteHashingEverywhere)) - var i uint - if rb.data_ != nil { - copy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)]) - rb.data_ = nil - } - - rb.data_ = new_data - rb.cur_size_ = buflen - rb.buffer_ = rb.data_[2:] - rb.data_[1] = 0 - rb.data_[0] = rb.data_[1] - for i = 0; i < kSlackForEightByteHashingEverywhere; i++ { - rb.buffer_[rb.cur_size_+uint32(i)] = 0 - } -} - -func ringBufferWriteTail(bytes []byte, n uint, rb *ringBuffer) { - var masked_pos uint = uint(rb.pos_ & rb.mask_) - if uint32(masked_pos) < rb.tail_size_ { - /* Just fill the tail buffer with the beginning data. */ - var p uint = uint(rb.size_ + uint32(masked_pos)) - copy(rb.buffer_[p:], bytes[:brotli_min_size_t(n, uint(rb.tail_size_-uint32(masked_pos)))]) - } -} - -/* Push bytes into the ring buffer. */ -func ringBufferWrite(bytes []byte, n uint, rb *ringBuffer) { - if rb.pos_ == 0 && uint32(n) < rb.tail_size_ { - /* Special case for the first write: to process the first block, we don't - need to allocate the whole ring-buffer and we don't need the tail - either. However, we do this memory usage optimization only if the - first write is less than the tail size, which is also the input block - size, otherwise it is likely that other blocks will follow and we - will need to reallocate to the full size anyway. */ - rb.pos_ = uint32(n) - - ringBufferInitBuffer(rb.pos_, rb) - copy(rb.buffer_, bytes[:n]) - return - } - - if rb.cur_size_ < rb.total_size_ { - /* Lazily allocate the full buffer. */ - ringBufferInitBuffer(rb.total_size_, rb) - - /* Initialize the last two bytes to zero, so that we don't have to worry - later when we copy the last two bytes to the first two positions. */ - rb.buffer_[rb.size_-2] = 0 - - rb.buffer_[rb.size_-1] = 0 - } - { - var masked_pos uint = uint(rb.pos_ & rb.mask_) - - /* The length of the writes is limited so that we do not need to worry - about a write */ - ringBufferWriteTail(bytes, n, rb) - - if uint32(masked_pos+n) <= rb.size_ { - /* A single write fits. */ - copy(rb.buffer_[masked_pos:], bytes[:n]) - } else { - /* Split into two writes. - Copy into the end of the buffer, including the tail buffer. 
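ringbuffer.go, deleted above, keeps a power-of-two window addressed with pos & mask and duplicates the first tail bytes so copies never have to wrap mid-read. A toy sketch of just the masking idea follows (simplified types, not the deleted implementation; the duplicated tail region is omitted):

package main

import "fmt"

// ring is a toy power-of-two ring buffer that indexes with pos & mask,
// the same addressing trick ringBufferWrite uses above.
type ring struct {
	data []byte
	mask uint32
	pos  uint32
}

func newRing(windowBits uint) *ring {
	size := uint32(1) << windowBits
	return &ring{data: make([]byte, size), mask: size - 1}
}

func (r *ring) write(p []byte) {
	for _, b := range p {
		r.data[r.pos&r.mask] = b // wrap-around via masking, no modulo
		r.pos++
	}
}

func main() {
	r := newRing(3) // 8-byte window
	r.write([]byte("abcdefghij"))
	fmt.Printf("%q\n", r.data) // "ijcdefgh": the oldest bytes were overwritten
}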
*/ - copy(rb.buffer_[masked_pos:], bytes[:brotli_min_size_t(n, uint(rb.total_size_-uint32(masked_pos)))]) - - /* Copy into the beginning of the buffer */ - copy(rb.buffer_, bytes[rb.size_-uint32(masked_pos):][:uint32(n)-(rb.size_-uint32(masked_pos))]) - } - } - { - var not_first_lap bool = rb.pos_&(1<<31) != 0 - var rb_pos_mask uint32 = (1 << 31) - 1 - rb.data_[0] = rb.buffer_[rb.size_-2] - rb.data_[1] = rb.buffer_[rb.size_-1] - rb.pos_ = (rb.pos_ & rb_pos_mask) + uint32(uint32(n)&rb_pos_mask) - if not_first_lap { - /* Wrap, but preserve not-a-first-lap feature. */ - rb.pos_ |= 1 << 31 - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/state.go b/vendor/github.com/andybalholm/brotli/state.go deleted file mode 100644 index d03348fe807..00000000000 --- a/vendor/github.com/andybalholm/brotli/state.go +++ /dev/null @@ -1,295 +0,0 @@ -package brotli - -import "io" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Brotli state for partial streaming decoding. */ -const ( - stateUninited = iota - stateLargeWindowBits - stateInitialize - stateMetablockBegin - stateMetablockHeader - stateMetablockHeader2 - stateContextModes - stateCommandBegin - stateCommandInner - stateCommandPostDecodeLiterals - stateCommandPostWrapCopy - stateUncompressed - stateMetadata - stateCommandInnerWrite - stateMetablockDone - stateCommandPostWrite1 - stateCommandPostWrite2 - stateHuffmanCode0 - stateHuffmanCode1 - stateHuffmanCode2 - stateHuffmanCode3 - stateContextMap1 - stateContextMap2 - stateTreeGroup - stateDone -) - -const ( - stateMetablockHeaderNone = iota - stateMetablockHeaderEmpty - stateMetablockHeaderNibbles - stateMetablockHeaderSize - stateMetablockHeaderUncompressed - stateMetablockHeaderReserved - stateMetablockHeaderBytes - stateMetablockHeaderMetadata -) - -const ( - stateUncompressedNone = iota - stateUncompressedWrite -) - -const ( - stateTreeGroupNone = iota - stateTreeGroupLoop -) - -const ( - stateContextMapNone = iota - stateContextMapReadPrefix - stateContextMapHuffman - stateContextMapDecode - stateContextMapTransform -) - -const ( - stateHuffmanNone = iota - stateHuffmanSimpleSize - stateHuffmanSimpleRead - stateHuffmanSimpleBuild - stateHuffmanComplex - stateHuffmanLengthSymbols -) - -const ( - stateDecodeUint8None = iota - stateDecodeUint8Short - stateDecodeUint8Long -) - -const ( - stateReadBlockLengthNone = iota - stateReadBlockLengthSuffix -) - -type Reader struct { - src io.Reader - buf []byte // scratch space for reading from src - in []byte // current chunk to decode; usually aliases buf - - state int - loop_counter int - br bitReader - buffer struct { - u64 uint64 - u8 [8]byte - } - buffer_length uint32 - pos int - max_backward_distance int - max_distance int - ringbuffer_size int - ringbuffer_mask int - dist_rb_idx int - dist_rb [4]int - error_code int - sub_loop_counter uint32 - ringbuffer []byte - ringbuffer_end []byte - htree_command []huffmanCode - context_lookup []byte - context_map_slice []byte - dist_context_map_slice []byte - literal_hgroup huffmanTreeGroup - insert_copy_hgroup huffmanTreeGroup - distance_hgroup huffmanTreeGroup - block_type_trees []huffmanCode - block_len_trees []huffmanCode - trivial_literal_context int - distance_context int - meta_block_remaining_len int - block_length_index uint32 - block_length [3]uint32 - num_block_types [3]uint32 - block_type_rb [6]uint32 - distance_postfix_bits uint32 - num_direct_distance_codes 
uint32 - distance_postfix_mask int - num_dist_htrees uint32 - dist_context_map []byte - literal_htree []huffmanCode - dist_htree_index byte - repeat_code_len uint32 - prev_code_len uint32 - copy_length int - distance_code int - rb_roundtrips uint - partial_pos_out uint - symbol uint32 - repeat uint32 - space uint32 - table [32]huffmanCode - symbol_lists symbolList - symbols_lists_array [huffmanMaxCodeLength + 1 + numCommandSymbols]uint16 - next_symbol [32]int - code_length_code_lengths [codeLengthCodes]byte - code_length_histo [16]uint16 - htree_index int - next []huffmanCode - context_index uint32 - max_run_length_prefix uint32 - code uint32 - context_map_table [huffmanMaxSize272]huffmanCode - substate_metablock_header int - substate_tree_group int - substate_context_map int - substate_uncompressed int - substate_huffman int - substate_decode_uint8 int - substate_read_block_length int - is_last_metablock uint - is_uncompressed uint - is_metadata uint - should_wrap_ringbuffer uint - canny_ringbuffer_allocation uint - large_window bool - size_nibbles uint - window_bits uint32 - new_ringbuffer_size int - num_literal_htrees uint32 - context_map []byte - context_modes []byte - dictionary *dictionary - transforms *transforms - trivial_literal_contexts [8]uint32 -} - -func decoderStateInit(s *Reader) bool { - s.error_code = 0 /* BROTLI_DECODER_NO_ERROR */ - - initBitReader(&s.br) - s.state = stateUninited - s.large_window = false - s.substate_metablock_header = stateMetablockHeaderNone - s.substate_tree_group = stateTreeGroupNone - s.substate_context_map = stateContextMapNone - s.substate_uncompressed = stateUncompressedNone - s.substate_huffman = stateHuffmanNone - s.substate_decode_uint8 = stateDecodeUint8None - s.substate_read_block_length = stateReadBlockLengthNone - - s.buffer_length = 0 - s.loop_counter = 0 - s.pos = 0 - s.rb_roundtrips = 0 - s.partial_pos_out = 0 - - s.block_type_trees = nil - s.block_len_trees = nil - s.ringbuffer = nil - s.ringbuffer_size = 0 - s.new_ringbuffer_size = 0 - s.ringbuffer_mask = 0 - - s.context_map = nil - s.context_modes = nil - s.dist_context_map = nil - s.context_map_slice = nil - s.dist_context_map_slice = nil - - s.sub_loop_counter = 0 - - s.literal_hgroup.codes = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.codes = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.codes = nil - s.distance_hgroup.htrees = nil - - s.is_last_metablock = 0 - s.is_uncompressed = 0 - s.is_metadata = 0 - s.should_wrap_ringbuffer = 0 - s.canny_ringbuffer_allocation = 1 - - s.window_bits = 0 - s.max_distance = 0 - s.dist_rb[0] = 16 - s.dist_rb[1] = 15 - s.dist_rb[2] = 11 - s.dist_rb[3] = 4 - s.dist_rb_idx = 0 - s.block_type_trees = nil - s.block_len_trees = nil - - s.symbol_lists.storage = s.symbols_lists_array[:] - s.symbol_lists.offset = huffmanMaxCodeLength + 1 - - s.dictionary = getDictionary() - s.transforms = getTransforms() - - return true -} - -func decoderStateMetablockBegin(s *Reader) { - s.meta_block_remaining_len = 0 - s.block_length[0] = 1 << 24 - s.block_length[1] = 1 << 24 - s.block_length[2] = 1 << 24 - s.num_block_types[0] = 1 - s.num_block_types[1] = 1 - s.num_block_types[2] = 1 - s.block_type_rb[0] = 1 - s.block_type_rb[1] = 0 - s.block_type_rb[2] = 1 - s.block_type_rb[3] = 0 - s.block_type_rb[4] = 1 - s.block_type_rb[5] = 0 - s.context_map = nil - s.context_modes = nil - s.dist_context_map = nil - s.context_map_slice = nil - s.literal_htree = nil - s.dist_context_map_slice = nil - s.dist_htree_index = 0 - s.context_lookup = nil - 
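decoderStateInit above seeds a four-slot "last distances" ring buffer with 16, 15, 11 and 4, the initial values the brotli format prescribes. The sketch below only illustrates the ring mechanics with those seeds; how distance codes select slots follows RFC 7932 and is deliberately left out, and the helper names are invented.

package main

import "fmt"

// lastDistances is a toy four-slot ring seeded with the initial values that
// decoderStateInit assigns above (16, 15, 11, 4). Only the ring mechanics are
// shown; the mapping from brotli distance codes to these slots is defined by
// RFC 7932 and omitted here.
type lastDistances struct {
	rb  [4]int
	idx int
}

func newLastDistances() *lastDistances {
	return &lastDistances{rb: [4]int{16, 15, 11, 4}}
}

// push records a newly decoded explicit distance, overwriting the oldest slot.
func (l *lastDistances) push(d int) {
	l.rb[l.idx&3] = d
	l.idx++
}

// nthLast returns the n-th most recently pushed distance (n = 0 is newest).
func (l *lastDistances) nthLast(n int) int {
	return l.rb[(l.idx-1-n)&3]
}

func main() {
	l := newLastDistances()
	l.push(42)
	l.push(100)
	fmt.Println(l.nthLast(0), l.nthLast(1)) // 100 42
}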
s.literal_hgroup.codes = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.codes = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.codes = nil - s.distance_hgroup.htrees = nil -} - -func decoderStateCleanupAfterMetablock(s *Reader) { - s.context_modes = nil - s.context_map = nil - s.dist_context_map = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.htrees = nil -} - -func decoderHuffmanTreeGroupInit(s *Reader, group *huffmanTreeGroup, alphabet_size uint32, max_symbol uint32, ntrees uint32) bool { - var max_table_size uint = uint(kMaxHuffmanTableSize[(alphabet_size+31)>>5]) - group.alphabet_size = uint16(alphabet_size) - group.max_symbol = uint16(max_symbol) - group.num_htrees = uint16(ntrees) - group.htrees = make([][]huffmanCode, ntrees) - group.codes = make([]huffmanCode, (uint(ntrees) * max_table_size)) - return !(group.codes == nil) -} diff --git a/vendor/github.com/andybalholm/brotli/static_dict.go b/vendor/github.com/andybalholm/brotli/static_dict.go deleted file mode 100644 index 8e7492d7aed..00000000000 --- a/vendor/github.com/andybalholm/brotli/static_dict.go +++ /dev/null @@ -1,666 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Class to model the static dictionary. */ - -const maxStaticDictionaryMatchLen = 37 - -const kInvalidMatch uint32 = 0xFFFFFFF - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ -func hash(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kDictHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return h >> uint(32-kDictNumBits) -} - -func addMatch(distance uint, len uint, len_code uint, matches []uint32) { - var match uint32 = uint32((distance << 5) + len_code) - matches[len] = brotli_min_uint32_t(matches[len], match) -} - -func dictMatchLength(dict *dictionary, data []byte, id uint, len uint, maxlen uint) uint { - var offset uint = uint(dict.offsets_by_length[len]) + len*id - return findMatchLengthWithLimit(dict.data[offset:], data, brotli_min_size_t(uint(len), maxlen)) -} - -func isMatch(d *dictionary, w dictWord, data []byte, max_length uint) bool { - if uint(w.len) > max_length { - return false - } else { - var offset uint = uint(d.offsets_by_length[w.len]) + uint(w.len)*uint(w.idx) - var dict []byte = d.data[offset:] - if w.transform == 0 { - /* Match against base dictionary word. */ - return findMatchLengthWithLimit(dict, data, uint(w.len)) == uint(w.len) - } else if w.transform == 10 { - /* Match against uppercase first transform. - Note that there are only ASCII uppercase words in the lookup table. */ - return dict[0] >= 'a' && dict[0] <= 'z' && (dict[0]^32) == data[0] && findMatchLengthWithLimit(dict[1:], data[1:], uint(w.len)-1) == uint(w.len-1) - } else { - /* Match against uppercase all transform. - Note that there are only ASCII uppercase words in the lookup table. 
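static_dict.go matches dictionary words under a set of transforms; the cheapest, "uppercase first", is detected in isMatch above by case-flipping the first byte with XOR 32 (the stored words are lower-case ASCII). A standalone sketch of just that check, with an invented helper name:

package main

import (
	"bytes"
	"fmt"
)

// matchUppercaseFirst reports whether data begins with dict under the
// "uppercase first" transform: only the first byte of the stored lower-case
// ASCII word is case-flipped, which is an XOR with 0x20. This mirrors the
// dict[0]^32 == data[0] test in isMatch; it is an illustration, not the
// deleted code.
func matchUppercaseFirst(dict, data []byte) bool {
	if len(dict) == 0 || len(data) < len(dict) {
		return false
	}
	if dict[0] < 'a' || dict[0] > 'z' || dict[0]^0x20 != data[0] {
		return false
	}
	return bytes.HasPrefix(data[1:], dict[1:])
}

func main() {
	fmt.Println(matchUppercaseFirst([]byte("there"), []byte("There is"))) // true
	fmt.Println(matchUppercaseFirst([]byte("there"), []byte("there is"))) // false
}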
*/ - var i uint - for i = 0; i < uint(w.len); i++ { - if dict[i] >= 'a' && dict[i] <= 'z' { - if (dict[i] ^ 32) != data[i] { - return false - } - } else { - if dict[i] != data[i] { - return false - } - } - } - - return true - } - } -} - -func findAllStaticDictionaryMatches(dict *encoderDictionary, data []byte, min_length uint, max_length uint, matches []uint32) bool { - var has_found_match bool = false - { - var offset uint = uint(dict.buckets[hash(data)]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 { - var matchlen uint = dictMatchLength(dict.words, data, id, l, max_length) - var s []byte - var minlen uint - var maxlen uint - var len uint - - /* Transform "" + BROTLI_TRANSFORM_IDENTITY + "" */ - if matchlen == l { - addMatch(id, l, l, matches) - has_found_match = true - } - - /* Transforms "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "" and - "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "ing " */ - if matchlen >= l-1 { - addMatch(id+12*n, l-1, l, matches) - if l+2 < max_length && data[l-1] == 'i' && data[l] == 'n' && data[l+1] == 'g' && data[l+2] == ' ' { - addMatch(id+49*n, l+3, l, matches) - } - - has_found_match = true - } - - /* Transform "" + BROTLI_TRANSFORM_OMIT_LAST_# + "" (# = 2 .. 9) */ - minlen = min_length - - if l > 9 { - minlen = brotli_max_size_t(minlen, l-9) - } - maxlen = brotli_min_size_t(matchlen, l-2) - for len = minlen; len <= maxlen; len++ { - var cut uint = l - len - var transform_id uint = (cut << 2) + uint((dict.cutoffTransforms>>(cut*6))&0x3F) - addMatch(id+transform_id*n, uint(len), l, matches) - has_found_match = true - } - - if matchlen < l || l+6 >= max_length { - continue - } - - s = data[l:] - - /* Transforms "" + BROTLI_TRANSFORM_IDENTITY + */ - if s[0] == ' ' { - addMatch(id+n, l+1, l, matches) - if s[1] == 'a' { - if s[2] == ' ' { - addMatch(id+28*n, l+3, l, matches) - } else if s[2] == 's' { - if s[3] == ' ' { - addMatch(id+46*n, l+4, l, matches) - } - } else if s[2] == 't' { - if s[3] == ' ' { - addMatch(id+60*n, l+4, l, matches) - } - } else if s[2] == 'n' { - if s[3] == 'd' && s[4] == ' ' { - addMatch(id+10*n, l+5, l, matches) - } - } - } else if s[1] == 'b' { - if s[2] == 'y' && s[3] == ' ' { - addMatch(id+38*n, l+4, l, matches) - } - } else if s[1] == 'i' { - if s[2] == 'n' { - if s[3] == ' ' { - addMatch(id+16*n, l+4, l, matches) - } - } else if s[2] == 's' { - if s[3] == ' ' { - addMatch(id+47*n, l+4, l, matches) - } - } - } else if s[1] == 'f' { - if s[2] == 'o' { - if s[3] == 'r' && s[4] == ' ' { - addMatch(id+25*n, l+5, l, matches) - } - } else if s[2] == 'r' { - if s[3] == 'o' && s[4] == 'm' && s[5] == ' ' { - addMatch(id+37*n, l+6, l, matches) - } - } - } else if s[1] == 'o' { - if s[2] == 'f' { - if s[3] == ' ' { - addMatch(id+8*n, l+4, l, matches) - } - } else if s[2] == 'n' { - if s[3] == ' ' { - addMatch(id+45*n, l+4, l, matches) - } - } - } else if s[1] == 'n' { - if s[2] == 'o' && s[3] == 't' && s[4] == ' ' { - addMatch(id+80*n, l+5, l, matches) - } - } else if s[1] == 't' { - if s[2] == 'h' { - if s[3] == 'e' { - if s[4] == ' ' { - addMatch(id+5*n, l+5, l, matches) - } - } else if s[3] == 'a' { - if s[4] == 't' && s[5] == ' ' { - addMatch(id+29*n, l+6, l, matches) - } - } - } else if s[2] == 'o' { - if s[3] == ' ' { - addMatch(id+17*n, l+4, l, matches) - } - } - } else if s[1] == 'w' { - if s[2] == 'i' && s[3] 
== 't' && s[4] == 'h' && s[5] == ' ' { - addMatch(id+35*n, l+6, l, matches) - } - } - } else if s[0] == '"' { - addMatch(id+19*n, l+1, l, matches) - if s[1] == '>' { - addMatch(id+21*n, l+2, l, matches) - } - } else if s[0] == '.' { - addMatch(id+20*n, l+1, l, matches) - if s[1] == ' ' { - addMatch(id+31*n, l+2, l, matches) - if s[2] == 'T' && s[3] == 'h' { - if s[4] == 'e' { - if s[5] == ' ' { - addMatch(id+43*n, l+6, l, matches) - } - } else if s[4] == 'i' { - if s[5] == 's' && s[6] == ' ' { - addMatch(id+75*n, l+7, l, matches) - } - } - } - } - } else if s[0] == ',' { - addMatch(id+76*n, l+1, l, matches) - if s[1] == ' ' { - addMatch(id+14*n, l+2, l, matches) - } - } else if s[0] == '\n' { - addMatch(id+22*n, l+1, l, matches) - if s[1] == '\t' { - addMatch(id+50*n, l+2, l, matches) - } - } else if s[0] == ']' { - addMatch(id+24*n, l+1, l, matches) - } else if s[0] == '\'' { - addMatch(id+36*n, l+1, l, matches) - } else if s[0] == ':' { - addMatch(id+51*n, l+1, l, matches) - } else if s[0] == '(' { - addMatch(id+57*n, l+1, l, matches) - } else if s[0] == '=' { - if s[1] == '"' { - addMatch(id+70*n, l+2, l, matches) - } else if s[1] == '\'' { - addMatch(id+86*n, l+2, l, matches) - } - } else if s[0] == 'a' { - if s[1] == 'l' && s[2] == ' ' { - addMatch(id+84*n, l+3, l, matches) - } - } else if s[0] == 'e' { - if s[1] == 'd' { - if s[2] == ' ' { - addMatch(id+53*n, l+3, l, matches) - } - } else if s[1] == 'r' { - if s[2] == ' ' { - addMatch(id+82*n, l+3, l, matches) - } - } else if s[1] == 's' { - if s[2] == 't' && s[3] == ' ' { - addMatch(id+95*n, l+4, l, matches) - } - } - } else if s[0] == 'f' { - if s[1] == 'u' && s[2] == 'l' && s[3] == ' ' { - addMatch(id+90*n, l+4, l, matches) - } - } else if s[0] == 'i' { - if s[1] == 'v' { - if s[2] == 'e' && s[3] == ' ' { - addMatch(id+92*n, l+4, l, matches) - } - } else if s[1] == 'z' { - if s[2] == 'e' && s[3] == ' ' { - addMatch(id+100*n, l+4, l, matches) - } - } - } else if s[0] == 'l' { - if s[1] == 'e' { - if s[2] == 's' && s[3] == 's' && s[4] == ' ' { - addMatch(id+93*n, l+5, l, matches) - } - } else if s[1] == 'y' { - if s[2] == ' ' { - addMatch(id+61*n, l+3, l, matches) - } - } - } else if s[0] == 'o' { - if s[1] == 'u' && s[2] == 's' && s[3] == ' ' { - addMatch(id+106*n, l+4, l, matches) - } - } - } else { - var is_all_caps bool = (w.transform != transformUppercaseFirst) - /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and - is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL) - transform. */ - - var s []byte - if !isMatch(dict.words, w, data, max_length) { - continue - } - - /* Transform "" + kUppercase{First,All} + "" */ - var tmp int - if is_all_caps { - tmp = 44 - } else { - tmp = 9 - } - addMatch(id+uint(tmp)*n, l, l, matches) - - has_found_match = true - if l+1 >= max_length { - continue - } - - /* Transforms "" + kUppercase{First,All} + */ - s = data[l:] - - if s[0] == ' ' { - var tmp int - if is_all_caps { - tmp = 68 - } else { - tmp = 4 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '"' { - var tmp int - if is_all_caps { - tmp = 87 - } else { - tmp = 66 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == '>' { - var tmp int - if is_all_caps { - tmp = 97 - } else { - tmp = 69 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == '.' 
{ - var tmp int - if is_all_caps { - tmp = 101 - } else { - tmp = 79 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 114 - } else { - tmp = 88 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == ',' { - var tmp int - if is_all_caps { - tmp = 112 - } else { - tmp = 99 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 107 - } else { - tmp = 58 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == '\'' { - var tmp int - if is_all_caps { - tmp = 94 - } else { - tmp = 74 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '(' { - var tmp int - if is_all_caps { - tmp = 113 - } else { - tmp = 78 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '=' { - if s[1] == '"' { - var tmp int - if is_all_caps { - tmp = 105 - } else { - tmp = 104 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[1] == '\'' { - var tmp int - if is_all_caps { - tmp = 116 - } else { - tmp = 108 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } - } - } - } - - /* Transforms with prefixes " " and "." */ - if max_length >= 5 && (data[0] == ' ' || data[0] == '.') { - var is_space bool = (data[0] == ' ') - var offset uint = uint(dict.buckets[hash(data[1:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 { - var s []byte - if !isMatch(dict.words, w, data[1:], max_length-1) { - continue - } - - /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + "" and - "." + BROTLI_TRANSFORM_IDENTITY + "" */ - var tmp int - if is_space { - tmp = 6 - } else { - tmp = 32 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - - has_found_match = true - if l+2 >= max_length { - continue - } - - /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + and - "." + BROTLI_TRANSFORM_IDENTITY + - */ - s = data[l+1:] - - if s[0] == ' ' { - var tmp int - if is_space { - tmp = 2 - } else { - tmp = 77 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[0] == '(' { - var tmp int - if is_space { - tmp = 89 - } else { - tmp = 67 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if is_space { - if s[0] == ',' { - addMatch(id+103*n, l+2, l, matches) - if s[1] == ' ' { - addMatch(id+33*n, l+3, l, matches) - } - } else if s[0] == '.' { - addMatch(id+71*n, l+2, l, matches) - if s[1] == ' ' { - addMatch(id+52*n, l+3, l, matches) - } - } else if s[0] == '=' { - if s[1] == '"' { - addMatch(id+81*n, l+3, l, matches) - } else if s[1] == '\'' { - addMatch(id+98*n, l+3, l, matches) - } - } - } - } else if is_space { - var is_all_caps bool = (w.transform != transformUppercaseFirst) - /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and - is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL) - transform. 
*/ - - var s []byte - if !isMatch(dict.words, w, data[1:], max_length-1) { - continue - } - - /* Transforms " " + kUppercase{First,All} + "" */ - var tmp int - if is_all_caps { - tmp = 85 - } else { - tmp = 30 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - - has_found_match = true - if l+2 >= max_length { - continue - } - - /* Transforms " " + kUppercase{First,All} + */ - s = data[l+1:] - - if s[0] == ' ' { - var tmp int - if is_all_caps { - tmp = 83 - } else { - tmp = 15 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[0] == ',' { - if !is_all_caps { - addMatch(id+109*n, l+2, l, matches) - } - - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 111 - } else { - tmp = 65 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } else if s[0] == '.' { - var tmp int - if is_all_caps { - tmp = 115 - } else { - tmp = 96 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 117 - } else { - tmp = 91 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } else if s[0] == '=' { - if s[1] == '"' { - var tmp int - if is_all_caps { - tmp = 110 - } else { - tmp = 118 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } else if s[1] == '\'' { - var tmp int - if is_all_caps { - tmp = 119 - } else { - tmp = 120 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } - } - } - } - - if max_length >= 6 { - /* Transforms with prefixes "e ", "s ", ", " and "\xC2\xA0" */ - if (data[1] == ' ' && (data[0] == 'e' || data[0] == 's' || data[0] == ',')) || (data[0] == 0xC2 && data[1] == 0xA0) { - var offset uint = uint(dict.buckets[hash(data[2:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 && isMatch(dict.words, w, data[2:], max_length-2) { - if data[0] == 0xC2 { - addMatch(id+102*n, l+2, l, matches) - has_found_match = true - } else if l+2 < max_length && data[l+2] == ' ' { - var t uint = 13 - if data[0] == 'e' { - t = 18 - } else if data[0] == 's' { - t = 7 - } - addMatch(id+t*n, l+3, l, matches) - has_found_match = true - } - } - } - } - } - - if max_length >= 9 { - /* Transforms with prefixes " the " and ".com/" */ - if (data[0] == ' ' && data[1] == 't' && data[2] == 'h' && data[3] == 'e' && data[4] == ' ') || (data[0] == '.' 
&& data[1] == 'c' && data[2] == 'o' && data[3] == 'm' && data[4] == '/') { - var offset uint = uint(dict.buckets[hash(data[5:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 && isMatch(dict.words, w, data[5:], max_length-5) { - var tmp int - if data[0] == ' ' { - tmp = 41 - } else { - tmp = 72 - } - addMatch(id+uint(tmp)*n, l+5, l, matches) - has_found_match = true - if l+5 < max_length { - var s []byte = data[l+5:] - if data[0] == ' ' { - if l+8 < max_length && s[0] == ' ' && s[1] == 'o' && s[2] == 'f' && s[3] == ' ' { - addMatch(id+62*n, l+9, l, matches) - if l+12 < max_length && s[4] == 't' && s[5] == 'h' && s[6] == 'e' && s[7] == ' ' { - addMatch(id+73*n, l+13, l, matches) - } - } - } - } - } - } - } - } - - return has_found_match -} diff --git a/vendor/github.com/andybalholm/brotli/static_dict_lut.go b/vendor/github.com/andybalholm/brotli/static_dict_lut.go deleted file mode 100644 index b33963e967a..00000000000 --- a/vendor/github.com/andybalholm/brotli/static_dict_lut.go +++ /dev/null @@ -1,75094 +0,0 @@ -package brotli - -/* Copyright 2017 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Lookup table for static dictionary and transforms. */ - -type dictWord struct { - len byte - transform byte - idx uint16 -} - -const kDictNumBits int = 15 - -const kDictHashMul32 uint32 = 0x1E35A7BD - -var kStaticDictionaryBuckets = [32768]uint16{ - 1, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3, - 6, - 0, - 0, - 0, - 0, - 0, - 20, - 0, - 0, - 0, - 21, - 0, - 22, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23, - 0, - 0, - 25, - 0, - 29, - 0, - 53, - 0, - 0, - 0, - 0, - 0, - 0, - 55, - 0, - 0, - 0, - 0, - 0, - 0, - 61, - 76, - 0, - 0, - 0, - 94, - 0, - 0, - 0, - 0, - 0, - 0, - 96, - 0, - 97, - 0, - 98, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 99, - 101, - 106, - 108, - 0, - 0, - 0, - 0, - 0, - 110, - 0, - 111, - 112, - 0, - 113, - 118, - 124, - 0, - 0, - 0, - 0, - 0, - 125, - 128, - 0, - 0, - 0, - 0, - 129, - 0, - 0, - 131, - 0, - 0, - 0, - 0, - 0, - 0, - 132, - 0, - 0, - 135, - 0, - 0, - 0, - 137, - 0, - 0, - 0, - 0, - 0, - 138, - 139, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 142, - 143, - 144, - 0, - 0, - 0, - 0, - 0, - 145, - 0, - 0, - 0, - 146, - 149, - 151, - 152, - 0, - 0, - 153, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 154, - 0, - 0, - 0, - 0, - 0, - 0, - 155, - 0, - 0, - 0, - 0, - 160, - 182, - 0, - 0, - 0, - 0, - 0, - 0, - 183, - 0, - 0, - 0, - 188, - 189, - 0, - 0, - 192, - 0, - 0, - 0, - 0, - 0, - 0, - 194, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 197, - 202, - 209, - 0, - 0, - 210, - 0, - 224, - 0, - 0, - 0, - 225, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 231, - 0, - 0, - 0, - 232, - 0, - 240, - 0, - 0, - 242, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 244, - 0, - 0, - 0, - 246, - 0, - 0, - 249, - 251, - 253, - 0, - 0, - 0, - 0, - 0, - 258, - 0, - 0, - 261, - 263, - 0, - 0, - 0, - 267, - 0, - 0, - 268, - 0, - 269, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 271, - 0, - 0, - 0, - 0, - 0, - 0, - 272, - 0, - 273, - 0, - 277, - 0, - 278, - 286, - 0, - 0, - 0, - 0, - 287, - 0, - 289, - 290, - 291, - 0, - 0, - 0, - 295, - 0, - 0, - 296, - 297, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
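static_dict_lut.go, whose deletion starts above, packs each dictionary word into three bytes: the low five bits of len are the word length, bit 0x80 marks the last entry of a hash bucket, and transform/idx pick the rewrite rule and word index. Together with the bucket hash from static_dict.go (kDictHashMul32 and kDictNumBits are the values shown in this hunk), that can be restated as a small sketch:

package main

import (
	"encoding/binary"
	"fmt"
)

const (
	dictNumBits   = 15         // kDictNumBits in the deleted file
	dictHashMul32 = 0x1E35A7BD // kDictHashMul32 in the deleted file
)

// dictWord mirrors the three-byte layout of the deleted lookup table.
type dictWord struct {
	len       byte
	transform byte
	idx       uint16
}

// bucketFor reproduces hash() from static_dict.go: multiply the first four
// bytes and keep the top kDictNumBits bits as the bucket index.
func bucketFor(data []byte) uint32 {
	h := binary.LittleEndian.Uint32(data) * dictHashMul32
	return h >> (32 - dictNumBits)
}

// decode unpacks the length byte: the low five bits are the word length and
// the high bit flags the final entry of a bucket chain, as used above.
func decode(w dictWord) (length uint, lastInBucket bool) {
	return uint(w.len & 0x1F), w.len&0x80 != 0
}

func main() {
	fmt.Println(bucketFor([]byte("the quick"))) // a bucket index in [0, 1<<15)
	fmt.Println(decode(dictWord{len: 0x85}))    // 5 true
}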
[elided: diff hunk removing a large auto-generated integer table (thousands of `- N,` lines, increasing values starting at 298 interleaved with zeros) from a generated/vendored file; no hand-written content in this hunk]
12569, - 0, - 0, - 0, - 12571, - 12574, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12577, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12578, - 12579, - 12603, - 0, - 12608, - 0, - 0, - 12611, - 0, - 12612, - 0, - 12615, - 0, - 12625, - 0, - 0, - 0, - 0, - 12627, - 12646, - 0, - 12648, - 0, - 0, - 12657, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12670, - 0, - 0, - 12671, - 0, - 12673, - 12677, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12679, - 0, - 12681, - 0, - 12682, - 12693, - 0, - 12694, - 0, - 12697, - 0, - 12701, - 0, - 0, - 0, - 12703, - 12704, - 0, - 0, - 0, - 0, - 12707, - 12737, - 0, - 0, - 12739, - 0, - 0, - 12740, - 0, - 0, - 12742, - 12743, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12745, - 0, - 12746, - 12747, - 0, - 12748, - 0, - 0, - 12759, - 12767, - 0, - 0, - 0, - 0, - 12773, - 0, - 12774, - 12778, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12779, - 0, - 0, - 0, - 0, - 0, - 12780, - 12793, - 0, - 12824, - 0, - 12825, - 0, - 12836, - 0, - 0, - 0, - 0, - 12839, - 0, - 12842, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12843, - 12845, - 0, - 12846, - 0, - 0, - 0, - 0, - 12847, - 0, - 0, - 12850, - 12852, - 12853, - 0, - 0, - 0, - 12854, - 0, - 0, - 0, - 12855, - 0, - 12856, - 0, - 12858, - 0, - 0, - 12859, - 0, - 12862, - 0, - 12863, - 0, - 0, - 12866, - 0, - 12869, - 12872, - 12873, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12875, - 0, - 12877, - 0, - 0, - 12878, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12884, - 12885, - 12888, - 0, - 12889, - 0, - 0, - 0, - 0, - 12893, - 0, - 0, - 0, - 12895, - 12896, - 12898, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12902, - 0, - 12909, - 12910, - 0, - 12926, - 0, - 12928, - 0, - 0, - 0, - 12929, - 0, - 12930, - 0, - 0, - 0, - 0, - 12931, - 0, - 12932, - 12933, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12934, - 0, - 12942, - 0, - 0, - 0, - 0, - 12944, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12946, - 0, - 0, - 12948, - 0, - 0, - 12949, - 0, - 0, - 0, - 0, - 12950, - 0, - 0, - 0, - 0, - 12951, - 0, - 12952, - 0, - 12953, - 0, - 0, - 0, - 12954, - 12958, - 12959, - 0, - 0, - 0, - 0, - 0, - 12960, - 12964, - 0, - 0, - 0, - 0, - 0, - 12966, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12970, - 0, - 12971, - 0, - 0, - 0, - 0, - 0, - 0, - 12972, - 0, - 0, - 12982, - 0, - 0, - 0, - 12984, - 12985, - 0, - 12986, - 12996, - 12997, - 13001, - 13002, - 0, - 0, - 0, - 0, - 13004, - 0, - 0, - 13005, - 0, - 0, - 13007, - 13009, - 0, - 13017, - 0, - 0, - 0, - 13020, - 0, - 13021, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13022, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13024, - 13027, - 0, - 0, - 0, - 0, - 0, - 13028, - 0, - 0, - 13029, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13032, - 0, - 13037, - 0, - 0, - 0, - 0, - 0, - 0, - 13040, - 0, - 0, - 13041, - 0, - 0, - 0, - 13043, - 13044, - 13046, - 0, - 0, - 0, - 0, - 13047, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13049, - 13054, - 0, - 13056, - 0, - 0, - 13060, - 13061, - 0, - 0, - 0, - 0, - 0, - 13067, - 0, - 0, - 13068, - 0, - 13071, - 0, - 0, - 0, - 0, - 0, - 13077, - 13078, - 0, - 0, - 0, - 0, - 0, - 13079, - 13080, - 13081, - 0, - 13082, - 0, - 0, - 0, - 13085, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13086, - 0, - 13087, - 13088, - 0, - 0, - 0, - 0, - 0, - 13094, - 0, - 13099, - 0, - 13100, - 0, - 0, - 0, - 13101, - 0, - 13125, - 13126, - 13128, - 13129, - 0, - 0, - 13130, - 0, - 13131, - 0, - 0, - 0, - 0, - 0, - 0, - 13134, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13150, - 0, - 13168, - 0, 
- 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13169, - 0, - 0, - 13170, - 0, - 0, - 0, - 0, - 13174, - 0, - 0, - 0, - 13176, - 0, - 0, - 0, - 0, - 0, - 13177, - 0, - 13178, - 13183, - 13187, - 0, - 0, - 0, - 13189, - 0, - 0, - 13190, - 0, - 0, - 13191, - 0, - 0, - 13206, - 0, - 0, - 0, - 13207, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13212, - 0, - 0, - 13219, - 13232, - 0, - 0, - 0, - 13241, - 0, - 13249, - 13253, - 0, - 0, - 0, - 0, - 0, - 13255, - 13259, - 0, - 13260, - 13261, - 0, - 13262, - 0, - 13272, - 0, - 0, - 0, - 0, - 13276, - 0, - 0, - 0, - 0, - 13277, - 13299, - 0, - 0, - 13301, - 13302, - 0, - 0, - 13303, - 0, - 0, - 13305, - 0, - 13310, - 0, - 0, - 0, - 13311, - 0, - 0, - 0, - 0, - 13325, - 0, - 13328, - 0, - 0, - 0, - 13329, - 0, - 0, - 0, - 0, - 0, - 0, - 13330, - 0, - 0, - 13331, - 0, - 13335, - 0, - 0, - 13342, - 0, - 0, - 0, - 0, - 0, - 13343, - 0, - 13354, - 0, - 13362, - 0, - 13366, - 13367, - 13369, - 0, - 0, - 13371, - 13372, - 0, - 13373, - 13374, - 0, - 13376, - 0, - 13380, - 13381, - 13386, - 0, - 13387, - 13388, - 0, - 13389, - 13391, - 13395, - 0, - 0, - 0, - 0, - 0, - 13401, - 13409, - 0, - 13410, - 0, - 0, - 0, - 0, - 13420, - 0, - 0, - 0, - 0, - 0, - 13422, - 0, - 0, - 0, - 0, - 13423, - 0, - 0, - 0, - 0, - 13425, - 0, - 0, - 0, - 0, - 0, - 13427, - 0, - 0, - 0, - 13428, - 0, - 0, - 13430, - 13438, - 0, - 13439, - 0, - 13445, - 0, - 13448, - 13449, - 0, - 0, - 0, - 0, - 0, - 0, - 13451, - 0, - 13457, - 0, - 0, - 0, - 0, - 13458, - 13459, - 0, - 13460, - 0, - 0, - 0, - 0, - 13464, - 13465, - 13466, - 13470, - 0, - 13471, - 13472, - 13474, - 13475, - 0, - 13476, - 0, - 0, - 13478, - 13479, - 0, - 13481, - 0, - 0, - 0, - 0, - 13487, - 0, - 13490, - 0, - 13493, - 0, - 0, - 13494, - 0, - 0, - 13495, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13496, - 13497, - 0, - 13500, - 0, - 0, - 13516, - 13522, - 0, - 0, - 13525, - 13528, - 0, - 0, - 0, - 13530, - 13535, - 0, - 13537, - 13539, - 0, - 13540, - 0, - 13543, - 0, - 13544, - 0, - 0, - 0, - 0, - 0, - 0, - 13545, - 0, - 0, - 0, - 0, - 0, - 0, - 13547, - 0, - 0, - 0, - 13549, - 13555, - 0, - 0, - 0, - 13556, - 13557, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13558, - 0, - 13563, - 0, - 0, - 0, - 0, - 13564, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13566, - 0, - 0, - 0, - 0, - 0, - 0, - 13569, - 0, - 0, - 13571, - 0, - 0, - 0, - 0, - 13573, - 0, - 0, - 0, - 0, - 0, - 0, - 13578, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13581, - 0, - 13586, - 0, - 13595, - 0, - 13600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13601, - 13603, - 0, - 13604, - 13605, - 13606, - 13607, - 0, - 0, - 13617, - 13618, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13623, - 0, - 13625, - 13627, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13629, - 0, - 0, - 0, - 13634, - 0, - 0, - 0, - 13638, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13654, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13656, - 0, - 13659, - 0, - 0, - 13660, - 0, - 0, - 13662, - 0, - 0, - 0, - 13663, - 0, - 13664, - 0, - 0, - 0, - 0, - 0, - 13668, - 0, - 13669, - 13671, - 0, - 0, - 13672, - 0, - 0, - 0, - 0, - 0, - 0, - 13675, - 13685, - 0, - 13686, - 0, - 0, - 0, - 13687, - 0, - 0, - 0, - 13692, - 13694, - 13697, - 0, - 0, - 0, - 13702, - 0, - 0, - 0, - 0, - 0, - 13705, - 0, - 0, - 0, - 0, - 13707, - 0, - 0, - 0, - 13714, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13715, - 0, - 13716, - 13717, - 0, - 0, - 13719, - 13724, - 13730, - 13731, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13732, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13734, - 0, - 
13736, - 0, - 0, - 13737, - 13738, - 13747, - 0, - 13751, - 0, - 0, - 13752, - 0, - 0, - 0, - 13753, - 0, - 13757, - 0, - 0, - 13762, - 13763, - 0, - 13764, - 13765, - 0, - 13766, - 0, - 0, - 13767, - 0, - 0, - 0, - 13768, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13769, - 0, - 0, - 13772, - 0, - 13775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13776, - 13778, - 13787, - 0, - 0, - 0, - 13797, - 0, - 13798, - 0, - 13801, - 0, - 13804, - 13806, - 0, - 0, - 0, - 0, - 13816, - 13817, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13834, - 0, - 13836, - 0, - 0, - 13838, - 0, - 0, - 13839, - 0, - 13840, - 0, - 0, - 0, - 0, - 13842, - 0, - 0, - 0, - 0, - 0, - 0, - 13843, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13845, - 0, - 0, - 0, - 0, - 0, - 13858, - 0, - 0, - 13860, - 0, - 0, - 13861, - 0, - 0, - 13862, - 13863, - 0, - 13868, - 0, - 13869, - 13870, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13872, - 0, - 0, - 0, - 0, - 13873, - 13878, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13886, - 0, - 13888, - 13889, - 13890, - 0, - 0, - 13891, - 13894, - 0, - 13897, - 13899, - 13900, - 13904, - 0, - 0, - 13906, - 0, - 0, - 0, - 13909, - 0, - 0, - 0, - 13910, - 0, - 0, - 0, - 13911, - 0, - 0, - 0, - 0, - 0, - 13912, - 13917, - 0, - 0, - 0, - 0, - 13918, - 0, - 13919, - 0, - 0, - 13920, - 0, - 0, - 0, - 13921, - 0, - 0, - 13922, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13924, - 0, - 13927, - 0, - 0, - 0, - 0, - 0, - 13932, - 0, - 13933, - 0, - 13934, - 0, - 0, - 13935, - 0, - 13944, - 0, - 0, - 0, - 13954, - 0, - 0, - 13955, - 0, - 0, - 0, - 0, - 13956, - 0, - 13957, - 0, - 13967, - 13969, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13970, - 13990, - 0, - 13991, - 13994, - 0, - 13995, - 0, - 0, - 0, - 0, - 13996, - 0, - 0, - 13999, - 0, - 0, - 0, - 14018, - 0, - 14019, - 0, - 14021, - 0, - 0, - 0, - 0, - 0, - 0, - 14041, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14043, - 0, - 0, - 0, - 0, - 14046, - 0, - 0, - 0, - 14048, - 14049, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14051, - 0, - 0, - 14052, - 14056, - 0, - 14063, - 0, - 14064, - 14066, - 0, - 0, - 14067, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14068, - 0, - 0, - 0, - 14072, - 0, - 14074, - 14075, - 0, - 14076, - 14079, - 14085, - 14086, - 14087, - 14093, - 0, - 0, - 0, - 0, - 14095, - 0, - 0, - 0, - 0, - 0, - 0, - 14096, - 14097, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14098, - 0, - 14102, - 0, - 0, - 0, - 0, - 0, - 14103, - 0, - 0, - 0, - 14104, - 0, - 0, - 14105, - 0, - 0, - 0, - 14107, - 14108, - 0, - 0, - 14109, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14117, - 0, - 0, - 0, - 0, - 14118, - 0, - 0, - 0, - 0, - 14119, - 0, - 0, - 14120, - 0, - 0, - 14121, - 0, - 14122, - 14127, - 0, - 14128, - 14136, - 0, - 0, - 14138, - 0, - 14140, - 0, - 0, - 0, - 14141, - 14142, - 0, - 0, - 0, - 0, - 14146, - 0, - 0, - 14149, - 0, - 14151, - 0, - 0, - 0, - 14152, - 0, - 0, - 14153, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14154, - 0, - 14156, - 14157, - 0, - 0, - 14159, - 0, - 14161, - 0, - 0, - 0, - 0, - 14162, - 0, - 0, - 0, - 0, - 0, - 0, - 14163, - 0, - 0, - 14173, - 0, - 0, - 0, - 0, - 0, - 0, - 14174, - 0, - 0, - 14176, - 0, - 0, - 14178, - 0, - 0, - 14179, - 14181, - 0, - 0, - 14182, - 14185, - 14187, - 0, - 14190, - 0, - 0, - 14197, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14198, - 0, - 0, - 0, - 0, - 0, - 0, - 14199, - 14200, - 0, - 0, - 0, - 14204, - 0, - 0, - 14208, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 14231, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14234, - 0, - 0, - 14235, - 0, - 0, - 0, - 14240, - 14241, - 0, - 0, - 0, - 14246, - 0, - 0, - 0, - 14247, - 0, - 14250, - 0, - 0, - 14251, - 0, - 0, - 14254, - 0, - 0, - 14256, - 0, - 0, - 0, - 14260, - 0, - 14261, - 0, - 0, - 0, - 0, - 14262, - 14267, - 14269, - 0, - 0, - 14277, - 0, - 0, - 14278, - 0, - 14279, - 14282, - 0, - 0, - 0, - 14283, - 0, - 0, - 0, - 14284, - 14285, - 0, - 0, - 0, - 0, - 14286, - 0, - 0, - 0, - 14288, - 0, - 0, - 0, - 14289, - 0, - 14290, - 0, - 14293, - 14301, - 14302, - 14304, - 14305, - 0, - 14307, - 0, - 14308, - 14309, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14311, - 14312, - 0, - 0, - 14317, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14318, - 0, - 0, - 0, - 0, - 14320, - 0, - 0, - 0, - 0, - 14321, - 14322, - 0, - 0, - 0, - 0, - 0, - 14326, - 14329, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14330, - 14331, - 0, - 0, - 0, - 0, - 14332, - 0, - 0, - 0, - 14333, - 0, - 0, - 14337, - 14340, - 0, - 14341, - 0, - 0, - 14342, - 0, - 14345, - 14346, - 0, - 0, - 14347, - 0, - 14362, - 0, - 0, - 0, - 0, - 0, - 14364, - 14365, - 14371, - 0, - 14373, - 0, - 0, - 14374, - 0, - 14379, - 0, - 14400, - 0, - 0, - 0, - 0, - 0, - 14401, - 0, - 0, - 14405, - 0, - 14406, - 0, - 14408, - 14409, - 0, - 0, - 0, - 14417, - 0, - 0, - 14424, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14430, - 0, - 0, - 0, - 14431, - 0, - 0, - 14435, - 0, - 14440, - 0, - 0, - 0, - 0, - 0, - 0, - 14442, - 0, - 0, - 14443, - 0, - 0, - 0, - 0, - 0, - 14446, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14454, - 0, - 14457, - 0, - 14460, - 0, - 0, - 14466, - 0, - 0, - 0, - 0, - 0, - 14467, - 0, - 0, - 0, - 0, - 0, - 0, - 14469, - 0, - 14477, - 0, - 0, - 0, - 0, - 0, - 0, - 14478, - 14482, - 0, - 0, - 0, - 14483, - 0, - 0, - 0, - 14485, - 14486, - 0, - 0, - 0, - 14487, - 14488, - 14489, - 14492, - 14493, - 14494, - 14495, - 14496, - 14497, - 0, - 14499, - 0, - 14501, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14502, - 0, - 14507, - 14512, - 14513, - 14514, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14515, - 14526, - 14530, - 0, - 14537, - 0, - 14544, - 0, - 14547, - 0, - 0, - 14548, - 14550, - 14551, - 0, - 0, - 14552, - 0, - 0, - 0, - 14553, - 0, - 14554, - 0, - 0, - 0, - 0, - 14556, - 14564, - 0, - 0, - 14565, - 14566, - 0, - 0, - 0, - 0, - 0, - 0, - 14568, - 0, - 0, - 14569, - 0, - 0, - 0, - 14571, - 14576, - 0, - 0, - 14577, - 14578, - 14579, - 0, - 0, - 14580, - 0, - 0, - 0, - 0, - 14582, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14583, - 0, - 0, - 0, - 0, - 0, - 14587, - 0, - 14588, - 0, - 0, - 14600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14601, - 0, - 0, - 14604, - 14605, - 14611, - 0, - 14613, - 0, - 0, - 0, - 0, - 14615, - 0, - 0, - 0, - 0, - 0, - 0, - 14627, - 0, - 14628, - 0, - 0, - 0, - 0, - 14631, - 0, - 14633, - 14634, - 0, - 0, - 0, - 0, - 14635, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14636, - 0, - 0, - 14639, - 14642, - 0, - 0, - 0, - 0, - 14644, - 0, - 0, - 0, - 0, - 14645, - 14646, - 0, - 14653, - 0, - 0, - 14654, - 0, - 14658, - 0, - 14661, - 0, - 0, - 0, - 14665, - 0, - 0, - 0, - 14668, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14669, - 0, - 0, - 14670, - 0, - 0, - 0, - 14680, - 0, - 0, - 14681, - 0, - 0, - 0, - 0, - 0, - 14682, - 14683, - 0, - 0, - 0, - 0, - 14686, - 0, - 0, - 0, - 0, - 14687, - 14697, - 0, - 0, - 0, - 0, - 14699, - 14705, - 14711, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 14712, - 0, - 0, - 0, - 14713, - 0, - 0, - 0, - 0, - 14719, - 0, - 14720, - 14721, - 14726, - 0, - 0, - 0, - 14728, - 14729, - 0, - 0, - 0, - 0, - 14731, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14733, - 14736, - 14737, - 0, - 0, - 14740, - 14742, - 0, - 0, - 0, - 14744, - 14753, - 0, - 0, - 0, - 0, - 14755, - 14758, - 14760, - 0, - 0, - 0, - 0, - 0, - 14761, - 14762, - 14765, - 14771, - 0, - 14772, - 0, - 14773, - 14774, - 0, - 0, - 14775, - 0, - 0, - 14776, - 0, - 0, - 0, - 0, - 14777, - 0, - 14779, - 0, - 0, - 14782, - 0, - 0, - 14785, - 14786, - 14788, - 0, - 0, - 0, - 0, - 0, - 14795, - 0, - 0, - 0, - 0, - 0, - 0, - 14798, - 0, - 14803, - 14804, - 14806, - 0, - 0, - 0, - 14809, - 0, - 0, - 0, - 0, - 0, - 0, - 14810, - 0, - 0, - 0, - 0, - 14811, - 0, - 14812, - 0, - 0, - 0, - 0, - 0, - 14815, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14816, - 0, - 14818, - 0, - 0, - 0, - 0, - 0, - 0, - 14819, - 0, - 14820, - 0, - 14823, - 0, - 0, - 0, - 14824, - 0, - 0, - 14826, - 14827, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14830, - 0, - 0, - 0, - 0, - 0, - 14833, - 0, - 14845, - 0, - 0, - 0, - 0, - 0, - 14846, - 0, - 0, - 14847, - 14871, - 0, - 14873, - 0, - 14876, - 0, - 14877, - 14878, - 14880, - 0, - 0, - 0, - 0, - 0, - 14881, - 0, - 14882, - 14894, - 0, - 0, - 0, - 0, - 14895, - 0, - 14907, - 0, - 14908, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14911, - 0, - 0, - 0, - 0, - 14920, - 0, - 0, - 14931, - 0, - 14932, - 14934, - 14935, - 0, - 0, - 14936, - 0, - 14945, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14947, - 0, - 0, - 14948, - 14949, - 14951, - 0, - 0, - 14952, - 0, - 0, - 0, - 14964, - 14973, - 0, - 0, - 14990, - 0, - 0, - 0, - 0, - 14995, - 0, - 0, - 14998, - 15001, - 0, - 0, - 15002, - 15020, - 0, - 0, - 0, - 0, - 0, - 0, - 15021, - 0, - 15022, - 0, - 0, - 0, - 0, - 15023, - 0, - 0, - 15025, - 15029, - 15033, - 0, - 0, - 0, - 15034, - 0, - 0, - 0, - 15035, - 0, - 0, - 0, - 0, - 0, - 15043, - 15044, - 0, - 0, - 0, - 15045, - 15046, - 15048, - 15050, - 0, - 15065, - 0, - 0, - 0, - 0, - 15066, - 0, - 0, - 15075, - 15082, - 15084, - 0, - 0, - 15085, - 15086, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15088, - 0, - 0, - 0, - 15089, - 0, - 0, - 0, - 0, - 15094, - 0, - 15096, - 0, - 15097, - 0, - 15100, - 0, - 0, - 15102, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15105, - 0, - 0, - 15106, - 0, - 15109, - 15113, - 0, - 0, - 0, - 15115, - 0, - 15118, - 0, - 0, - 0, - 0, - 0, - 0, - 15119, - 0, - 0, - 15120, - 0, - 0, - 0, - 0, - 0, - 15123, - 15129, - 0, - 0, - 0, - 15130, - 0, - 15131, - 0, - 0, - 15134, - 0, - 15135, - 0, - 0, - 0, - 15137, - 15138, - 0, - 0, - 0, - 0, - 0, - 0, - 15139, - 0, - 0, - 0, - 0, - 0, - 15140, - 0, - 0, - 15154, - 15162, - 0, - 15169, - 15170, - 0, - 15175, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15177, - 0, - 15178, - 15179, - 0, - 0, - 0, - 0, - 0, - 15183, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15185, - 15187, - 0, - 15194, - 15195, - 15196, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15204, - 0, - 0, - 0, - 0, - 15206, - 0, - 0, - 0, - 0, - 0, - 15207, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15213, - 0, - 15214, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15232, - 0, - 0, - 0, - 0, - 15234, - 0, - 15238, - 15240, - 0, - 15248, - 0, - 0, - 0, - 0, - 15250, - 15251, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15252, - 0, - 0, - 0, - 15255, - 15262, - 15266, - 0, - 0, - 0, - 15267, - 0, - 0, - 0, - 15277, - 15279, - 0, - 0, - 0, - 15280, - 15281, - 15282, - 0, - 0, - 0, - 0, - 0, - 
15285, - 0, - 0, - 0, - 0, - 15289, - 0, - 0, - 15291, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15296, - 15297, - 0, - 0, - 15304, - 0, - 0, - 0, - 0, - 15306, - 0, - 0, - 0, - 0, - 0, - 0, - 15307, - 15308, - 0, - 15309, - 0, - 0, - 15311, - 0, - 0, - 15312, - 15313, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15314, - 15317, - 0, - 0, - 0, - 15318, - 15319, - 0, - 0, - 0, - 0, - 15320, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15321, - 0, - 0, - 0, - 0, - 0, - 15324, - 0, - 15325, - 15326, - 0, - 15330, - 0, - 0, - 0, - 0, - 15334, - 0, - 15335, - 0, - 15341, - 0, - 0, - 15342, - 0, - 0, - 15343, - 15344, - 0, - 0, - 0, - 0, - 15345, - 0, - 0, - 0, - 0, - 15347, - 0, - 0, - 15348, - 15349, - 15350, - 0, - 15356, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15357, - 0, - 15358, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15359, - 15360, - 15364, - 0, - 15380, - 0, - 0, - 0, - 0, - 0, - 15392, - 0, - 0, - 15393, - 0, - 15395, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15396, - 0, - 0, - 15397, - 15398, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15399, - 0, - 15400, - 0, - 0, - 0, - 15402, - 0, - 15405, - 15410, - 0, - 0, - 0, - 0, - 15411, - 0, - 0, - 0, - 15412, - 0, - 15416, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15428, - 0, - 15435, - 0, - 0, - 15438, - 0, - 0, - 0, - 0, - 15439, - 0, - 0, - 0, - 15440, - 0, - 0, - 0, - 15441, - 15449, - 15451, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15452, - 0, - 0, - 15455, - 0, - 0, - 0, - 15456, - 0, - 0, - 15458, - 0, - 15460, - 15461, - 0, - 0, - 0, - 0, - 0, - 15462, - 15464, - 0, - 15465, - 0, - 0, - 15466, - 0, - 0, - 15467, - 0, - 0, - 0, - 0, - 0, - 15468, - 0, - 0, - 0, - 0, - 15481, - 0, - 0, - 15484, - 0, - 15485, - 15486, - 0, - 0, - 0, - 15487, - 0, - 0, - 0, - 0, - 0, - 15488, - 0, - 15492, - 15498, - 0, - 0, - 0, - 15499, - 0, - 0, - 0, - 15500, - 0, - 15501, - 0, - 0, - 15512, - 0, - 15522, - 0, - 0, - 0, - 15524, - 0, - 15525, - 15526, - 0, - 0, - 15527, - 0, - 0, - 15545, - 15546, - 0, - 15548, - 15552, - 0, - 15553, - 0, - 0, - 0, - 15554, - 0, - 15555, - 0, - 15557, - 15565, - 15573, - 15577, - 15578, - 0, - 15582, - 0, - 15583, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15586, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15588, - 0, - 0, - 0, - 0, - 0, - 15589, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15593, - 15594, - 0, - 0, - 0, - 0, - 15595, - 0, - 0, - 0, - 0, - 0, - 0, - 15596, - 0, - 0, - 0, - 15597, - 0, - 0, - 0, - 0, - 15600, - 0, - 0, - 15601, - 0, - 0, - 0, - 0, - 15602, - 15603, - 0, - 0, - 0, - 0, - 0, - 0, - 15604, - 0, - 15609, - 0, - 0, - 15612, - 0, - 0, - 15613, - 0, - 0, - 15615, - 15617, - 15618, - 0, - 0, - 15620, - 0, - 15636, - 15637, - 0, - 0, - 15649, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15650, - 0, - 0, - 15651, - 0, - 0, - 0, - 15656, - 0, - 15658, - 0, - 0, - 0, - 15664, - 0, - 0, - 15665, - 0, - 0, - 15668, - 0, - 0, - 0, - 0, - 0, - 15669, - 0, - 0, - 15674, - 0, - 0, - 15675, - 0, - 0, - 0, - 0, - 15676, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15677, - 0, - 0, - 0, - 0, - 15678, - 0, - 0, - 0, - 0, - 0, - 15679, - 0, - 0, - 15681, - 0, - 15686, - 0, - 0, - 0, - 0, - 15687, - 0, - 15688, - 0, - 0, - 15690, - 0, - 0, - 0, - 15697, - 0, - 15699, - 15700, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15701, - 0, - 15702, - 15703, - 0, - 15704, - 0, - 15705, - 0, - 15707, - 0, - 15709, - 0, - 15712, - 15716, - 0, - 15717, - 0, - 15718, - 15720, - 0, - 0, - 0, 
- 0, - 0, - 15724, - 0, - 0, - 0, - 15725, - 0, - 15726, - 0, - 0, - 0, - 15740, - 0, - 15745, - 15746, - 0, - 0, - 15747, - 0, - 15748, - 0, - 0, - 0, - 0, - 0, - 15749, - 0, - 0, - 0, - 15752, - 0, - 15753, - 0, - 0, - 0, - 0, - 0, - 0, - 15759, - 0, - 0, - 0, - 15765, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15767, - 0, - 0, - 0, - 15771, - 0, - 0, - 15784, - 0, - 0, - 0, - 0, - 15785, - 15790, - 15791, - 0, - 0, - 15792, - 0, - 0, - 0, - 15807, - 0, - 15811, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15818, - 0, - 0, - 0, - 15819, - 0, - 0, - 0, - 0, - 15821, - 0, - 0, - 0, - 0, - 0, - 15822, - 15824, - 0, - 0, - 15827, - 0, - 0, - 15829, - 15831, - 0, - 15832, - 0, - 0, - 15833, - 0, - 15835, - 15838, - 15839, - 15843, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15844, - 0, - 0, - 0, - 0, - 15845, - 15851, - 15856, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15858, - 15860, - 0, - 15861, - 0, - 0, - 0, - 15864, - 0, - 0, - 0, - 0, - 15865, - 0, - 0, - 0, - 0, - 0, - 0, - 15866, - 0, - 15872, - 0, - 0, - 15876, - 0, - 0, - 0, - 0, - 15877, - 15878, - 15883, - 15885, - 0, - 0, - 15888, - 0, - 0, - 0, - 0, - 0, - 15889, - 15890, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15892, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15893, - 0, - 0, - 15894, - 0, - 0, - 0, - 15895, - 0, - 15896, - 15897, - 0, - 15898, - 15901, - 15902, - 0, - 15911, - 15915, - 0, - 15916, - 0, - 15924, - 15935, - 0, - 15937, - 0, - 0, - 0, - 0, - 0, - 15950, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15958, - 0, - 0, - 0, - 15961, - 0, - 0, - 15966, - 0, - 15967, - 0, - 0, - 15977, - 0, - 0, - 15978, - 0, - 0, - 15981, - 15982, - 15983, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15986, - 0, - 0, - 0, - 15990, - 0, - 15991, - 15995, - 15998, - 0, - 15999, - 0, - 16000, - 0, - 0, - 0, - 0, - 16008, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16009, - 16011, - 0, - 16013, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16014, - 0, - 0, - 16015, - 16023, - 16024, - 16025, - 0, - 0, - 16026, - 0, - 16030, - 0, - 16032, - 0, - 16033, - 0, - 0, - 0, - 0, - 0, - 0, - 16035, - 16036, - 16037, - 0, - 0, - 0, - 0, - 0, - 16039, - 0, - 0, - 0, - 0, - 16041, - 0, - 0, - 0, - 0, - 0, - 16043, - 16044, - 0, - 0, - 16047, - 0, - 0, - 0, - 16048, - 0, - 0, - 16049, - 16050, - 16052, - 0, - 0, - 0, - 0, - 0, - 16055, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16056, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16058, - 16060, - 16061, - 0, - 0, - 16063, - 0, - 0, - 16064, - 0, - 0, - 0, - 16067, - 16068, - 0, - 0, - 16069, - 16078, - 0, - 0, - 0, - 16079, - 0, - 0, - 0, - 16080, - 0, - 16081, - 0, - 0, - 0, - 16088, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16089, - 16093, - 0, - 16097, - 0, - 16103, - 0, - 16104, - 16105, - 0, - 0, - 16256, - 0, - 0, - 16259, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16260, - 16261, - 0, - 0, - 16262, - 0, - 0, - 16263, - 0, - 16268, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16269, - 0, - 0, - 16270, - 16273, - 0, - 16274, - 0, - 0, - 0, - 0, - 16275, - 16276, - 16277, - 16280, - 0, - 0, - 0, - 16281, - 16284, - 0, - 0, - 0, - 16286, - 0, - 16289, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16290, - 0, - 0, - 0, - 0, - 16291, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16292, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16293, - 16295, - 16297, - 0, - 16302, - 0, - 16304, - 0, - 16305, - 0, - 16306, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16307, - 16308, - 16312, - 0, - 0, - 0, - 0, - 0, - 0, - 16313, - 16315, - 0, - 16318, - 0, - 
0, - 0, - 16321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16326, - 16333, - 16336, - 0, - 0, - 0, - 0, - 16337, - 16340, - 0, - 0, - 0, - 0, - 0, - 16345, - 0, - 0, - 16346, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16347, - 0, - 0, - 16348, - 0, - 0, - 0, - 0, - 16349, - 0, - 0, - 0, - 16350, - 0, - 16357, - 0, - 0, - 0, - 0, - 16359, - 16360, - 0, - 0, - 0, - 0, - 16362, - 16363, - 16364, - 16365, - 0, - 0, - 16366, - 0, - 0, - 0, - 0, - 16367, - 16368, - 0, - 16369, - 16374, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16376, - 0, - 0, - 0, - 0, - 16378, - 16379, - 0, - 16380, - 0, - 0, - 0, - 16381, - 16383, - 0, - 0, - 0, - 0, - 0, - 16390, - 0, - 0, - 0, - 16399, - 0, - 16402, - 16404, - 16406, - 16407, - 0, - 0, - 0, - 16409, - 16411, - 0, - 0, - 0, - 0, - 16412, - 0, - 16413, - 16415, - 16423, - 0, - 0, - 0, - 0, - 0, - 16424, - 0, - 0, - 0, - 16428, - 16434, - 16435, - 16449, - 0, - 16450, - 16451, - 0, - 0, - 0, - 16453, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16454, - 0, - 0, - 16456, - 16458, - 0, - 0, - 16459, - 0, - 0, - 16460, - 0, - 0, - 0, - 0, - 16462, - 0, - 16463, - 0, - 0, - 16466, - 0, - 0, - 0, - 0, - 0, - 16479, - 0, - 0, - 16480, - 0, - 16481, - 16484, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16485, - 0, - 0, - 0, - 0, - 0, - 0, - 16489, - 0, - 0, - 0, - 0, - 0, - 16491, - 0, - 0, - 16498, - 0, - 0, - 16503, - 0, - 16505, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16506, - 0, - 0, - 0, - 16508, - 16509, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16511, - 16513, - 0, - 0, - 0, - 16516, - 0, - 16517, - 0, - 16519, - 0, - 16529, - 0, - 0, - 16531, - 0, - 0, - 0, - 0, - 0, - 0, - 16534, - 0, - 0, - 16541, - 16542, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16543, - 16547, - 16548, - 0, - 0, - 0, - 16551, - 0, - 16552, - 0, - 0, - 0, - 16553, - 0, - 0, - 16558, - 0, - 0, - 16562, - 16565, - 0, - 0, - 0, - 16570, - 0, - 0, - 0, - 16573, - 16585, - 0, - 0, - 0, - 16586, - 16587, - 16595, - 0, - 16596, - 0, - 16598, - 0, - 0, - 0, - 16600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16601, - 0, - 0, - 0, - 0, - 16603, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16604, - 16612, - 0, - 0, - 0, - 0, - 16613, - 0, - 16618, - 0, - 0, - 0, - 16640, - 0, - 0, - 16641, - 0, - 0, - 0, - 0, - 0, - 0, - 16645, - 0, - 0, - 0, - 0, - 16646, - 0, - 0, - 0, - 0, - 0, - 0, - 16651, - 0, - 0, - 0, - 0, - 16653, - 16654, - 0, - 0, - 0, - 16655, - 0, - 0, - 16656, - 16667, - 0, - 0, - 0, - 0, - 16671, - 0, - 16672, - 0, - 0, - 0, - 16673, - 0, - 0, - 0, - 0, - 0, - 16676, - 0, - 16686, - 0, - 0, - 0, - 0, - 16689, - 0, - 16690, - 0, - 16692, - 0, - 16693, - 0, - 16694, - 0, - 16696, - 0, - 0, - 0, - 16705, - 0, - 0, - 0, - 0, - 0, - 0, - 16707, - 0, - 0, - 0, - 16709, - 0, - 0, - 0, - 0, - 16711, - 0, - 16712, - 16713, - 0, - 0, - 0, - 16715, - 0, - 0, - 0, - 0, - 16716, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16718, - 16724, - 0, - 0, - 16726, - 16727, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16728, - 0, - 16729, - 0, - 0, - 16730, - 0, - 0, - 0, - 0, - 0, - 16731, - 0, - 0, - 0, - 16732, - 0, - 0, - 0, - 0, - 16734, - 16738, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16743, - 0, - 0, - 16745, - 0, - 0, - 0, - 0, - 0, - 16749, - 0, - 16752, - 0, - 0, - 0, - 0, - 16756, - 0, - 0, - 16758, - 0, - 16759, - 0, - 0, - 0, - 0, - 0, - 16760, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16762, - 0, - 16769, - 0, - 16770, - 0, - 16772, - 0, - 0, - 0, - 16777, - 16780, - 0, - 0, - 0, - 0, - 0, - 0, - 16781, - 0, - 0, - 16782, - 0, - 16784, - 0, - 
0, - 16785, - 16787, - 16792, - 0, - 0, - 16794, - 0, - 0, - 0, - 16798, - 0, - 0, - 16809, - 0, - 0, - 16814, - 16816, - 16817, - 0, - 16819, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16820, - 0, - 0, - 16836, - 16839, - 0, - 0, - 16841, - 16851, - 16857, - 0, - 0, - 16858, - 16859, - 0, - 0, - 16860, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16862, - 0, - 16863, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16864, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16876, - 0, - 16881, - 16882, - 0, - 16885, - 16886, - 0, - 16887, - 0, - 0, - 0, - 16889, - 16891, - 0, - 0, - 0, - 0, - 0, - 16894, - 16895, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16897, - 0, - 16898, - 0, - 0, - 0, - 0, - 0, - 16913, - 0, - 0, - 16924, - 16925, - 16926, - 0, - 0, - 16927, - 0, - 0, - 0, - 16937, - 16938, - 0, - 0, - 0, - 16940, - 16941, - 0, - 0, - 0, - 16942, - 16945, - 0, - 16946, - 16949, - 16950, - 0, - 0, - 0, - 16952, - 16955, - 0, - 0, - 0, - 16965, - 0, - 16969, - 0, - 0, - 16975, - 0, - 0, - 16976, - 0, - 0, - 0, - 0, - 16978, - 0, - 0, - 16981, - 0, - 16983, - 16989, - 0, - 0, - 0, - 0, - 16990, - 0, - 0, - 16991, - 0, - 0, - 0, - 16993, - 0, - 16994, - 16996, - 17000, - 0, - 0, - 0, - 0, - 0, - 17002, - 17004, - 0, - 17006, - 0, - 0, - 17007, - 0, - 0, - 0, - 0, - 17008, - 17013, - 17014, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17021, - 0, - 17031, - 0, - 0, - 0, - 0, - 0, - 17033, - 17036, - 0, - 17038, - 0, - 0, - 17039, - 0, - 17045, - 0, - 0, - 17046, - 17047, - 0, - 0, - 0, - 0, - 17048, - 0, - 17049, - 17050, - 0, - 17051, - 17053, - 0, - 17054, - 0, - 17055, - 0, - 0, - 0, - 0, - 0, - 17063, - 0, - 0, - 17064, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17065, - 0, - 0, - 17068, - 0, - 0, - 0, - 0, - 0, - 17072, - 0, - 0, - 0, - 0, - 0, - 0, - 17073, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17074, - 0, - 17080, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17081, - 17083, - 17084, - 0, - 0, - 0, - 17085, - 0, - 0, - 0, - 0, - 17092, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17093, - 0, - 17095, - 17102, - 0, - 0, - 0, - 0, - 0, - 0, - 17103, - 0, - 0, - 17105, - 0, - 17107, - 0, - 0, - 0, - 0, - 17114, - 0, - 0, - 0, - 0, - 0, - 17115, - 17125, - 17127, - 0, - 0, - 17128, - 0, - 0, - 0, - 17129, - 17130, - 0, - 17131, - 0, - 0, - 0, - 0, - 0, - 17132, - 17135, - 17145, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17146, - 0, - 17147, - 0, - 17148, - 0, - 0, - 0, - 0, - 0, - 0, - 17149, - 17150, - 0, - 17151, - 17153, - 0, - 17155, - 0, - 0, - 0, - 0, - 17163, - 17171, - 0, - 17174, - 0, - 0, - 0, - 0, - 17179, - 0, - 0, - 17182, - 17185, - 0, - 0, - 0, - 0, - 0, - 17186, - 0, - 0, - 17188, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17189, - 17191, - 0, - 17194, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17195, - 17196, - 17203, - 17204, - 0, - 0, - 17205, - 17217, - 0, - 0, - 0, - 0, - 0, - 17218, - 0, - 0, - 0, - 0, - 17219, - 0, - 17220, - 0, - 17221, - 0, - 0, - 17230, - 0, - 0, - 0, - 0, - 0, - 17236, - 0, - 17238, - 17239, - 0, - 0, - 0, - 17241, - 17244, - 0, - 0, - 17245, - 0, - 17248, - 0, - 0, - 17251, - 0, - 17252, - 0, - 0, - 17264, - 0, - 17266, - 0, - 0, - 0, - 17268, - 0, - 0, - 0, - 0, - 17271, - 17272, - 0, - 17273, - 0, - 17295, - 0, - 17302, - 0, - 17305, - 0, - 0, - 0, - 17306, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17308, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17309, - 0, - 17310, - 17313, - 0, - 0, - 0, - 0, - 
17314, - 17315, - 0, - 17317, - 0, - 0, - 0, - 0, - 17318, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17320, - 0, - 0, - 0, - 0, - 0, - 0, - 17334, - 0, - 17344, - 17348, - 0, - 0, - 0, - 17350, - 17351, - 0, - 0, - 17353, - 0, - 0, - 17354, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17355, - 0, - 0, - 0, - 0, - 0, - 0, - 17356, - 17357, - 0, - 0, - 17359, - 0, - 0, - 0, - 17371, - 0, - 17372, - 0, - 0, - 0, - 17393, - 0, - 0, - 0, - 0, - 17394, - 0, - 0, - 0, - 0, - 0, - 17395, - 0, - 0, - 17399, - 0, - 0, - 0, - 17401, - 17417, - 0, - 17418, - 0, - 17419, - 0, - 0, - 0, - 0, - 0, - 17422, - 17423, - 0, - 0, - 0, - 0, - 0, - 17424, - 0, - 0, - 0, - 0, - 0, - 17428, - 17429, - 17433, - 0, - 0, - 0, - 17437, - 0, - 0, - 17441, - 0, - 0, - 17442, - 0, - 0, - 17453, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17454, - 17456, - 17462, - 0, - 0, - 17466, - 0, - 0, - 17468, - 0, - 0, - 17469, - 0, - 0, - 0, - 0, - 17470, - 0, - 17475, - 0, - 0, - 0, - 0, - 0, - 17479, - 0, - 0, - 0, - 17483, - 17484, - 0, - 17485, - 0, - 17486, - 0, - 17491, - 17492, - 0, - 0, - 17493, - 0, - 17494, - 17495, - 0, - 0, - 0, - 17496, - 0, - 0, - 0, - 17497, - 0, - 0, - 0, - 17502, - 0, - 0, - 0, - 0, - 0, - 17503, - 0, - 17505, - 0, - 17507, - 0, - 0, - 0, - 17512, - 17513, - 17514, - 0, - 0, - 17515, - 0, - 0, - 0, - 17519, - 0, - 0, - 0, - 17522, - 0, - 0, - 17523, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17527, - 0, - 0, - 0, - 17528, - 0, - 0, - 0, - 17534, - 0, - 0, - 0, - 0, - 17536, - 0, - 0, - 0, - 17539, - 0, - 17540, - 17543, - 17549, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17556, - 0, - 0, - 17558, - 0, - 17559, - 0, - 0, - 17560, - 0, - 0, - 0, - 17563, - 0, - 0, - 0, - 0, - 0, - 0, - 17564, - 0, - 0, - 17565, - 17566, - 0, - 17567, - 0, - 0, - 0, - 0, - 0, - 0, - 17569, - 17570, - 0, - 17575, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17581, - 0, - 0, - 0, - 17582, - 17583, - 0, - 17586, - 0, - 0, - 17587, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17588, - 0, - 0, - 0, - 0, - 17596, - 17597, - 0, - 0, - 17598, - 17600, - 0, - 0, - 0, - 0, - 0, - 0, - 17601, - 0, - 0, - 0, - 17604, - 0, - 0, - 17605, - 0, - 0, - 17607, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17612, - 0, - 0, - 17618, - 0, - 17621, - 17622, - 0, - 0, - 0, - 0, - 17623, - 0, - 0, - 17624, - 0, - 0, - 17630, - 0, - 0, - 17631, - 17633, - 17634, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17635, - 0, - 0, - 17636, - 0, - 0, - 17637, - 0, - 17638, - 0, - 17640, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17641, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17643, - 0, - 0, - 0, - 0, - 17645, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17646, - 17662, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17663, - 17664, - 0, - 17665, - 17666, - 0, - 0, - 0, - 17669, - 17671, - 17673, - 0, - 17679, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17684, - 0, - 0, - 0, - 17686, - 0, - 17714, - 0, - 0, - 17720, - 17722, - 17726, - 0, - 0, - 17728, - 0, - 0, - 17729, - 0, - 0, - 0, - 17732, - 0, - 17733, - 0, - 17734, - 0, - 0, - 0, - 17735, - 0, - 0, - 0, - 0, - 17737, - 0, - 0, - 0, - 0, - 17739, - 0, - 0, - 0, - 17741, - 17742, - 0, - 0, - 0, - 0, - 17743, - 17744, - 17745, - 0, - 0, - 0, - 17749, - 0, - 17750, - 17751, - 17752, - 17754, - 17761, - 17762, - 0, - 17763, - 0, - 17766, - 0, - 17772, - 0, - 0, - 0, - 0, - 0, - 17775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17776, - 0, - 0, - 17777, - 0, - 0, - 17778, - 17779, - 0, - 17782, - 17783, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17784, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17821, - 0, - 0, - 0, - 17822, - 0, - 0, - 0, - 17823, - 17825, - 0, - 0, - 0, - 0, - 0, - 17826, - 17831, - 17832, - 17833, - 0, - 0, - 17845, - 0, - 0, - 0, - 17846, - 0, - 0, - 0, - 17848, - 17850, - 17854, - 0, - 17855, - 0, - 0, - 17859, - 0, - 0, - 0, - 0, - 0, - 0, - 17860, - 17861, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17870, - 17871, - 0, - 0, - 0, - 0, - 0, - 0, - 17872, - 0, - 0, - 0, - 17879, - 0, - 0, - 0, - 17881, - 17883, - 0, - 17884, - 0, - 17885, - 0, - 0, - 17886, - 0, - 0, - 17887, - 17891, - 17953, - 0, - 0, - 0, - 0, - 17954, - 0, - 0, - 17955, - 0, - 17968, - 0, - 0, - 17972, - 0, - 0, - 0, - 0, - 0, - 17974, - 0, - 0, - 0, - 0, - 17976, - 17978, - 0, - 0, - 17983, - 0, - 0, - 0, - 0, - 18003, - 0, - 0, - 0, - 0, - 0, - 18007, - 0, - 0, - 0, - 0, - 0, - 18009, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18010, - 0, - 0, - 0, - 0, - 0, - 0, - 18012, - 0, - 0, - 18014, - 0, - 0, - 0, - 18015, - 0, - 0, - 0, - 18016, - 0, - 18017, - 0, - 0, - 0, - 18030, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18031, - 0, - 0, - 18036, - 18037, - 18038, - 0, - 0, - 18049, - 18056, - 0, - 18057, - 18058, - 0, - 18059, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18062, - 0, - 0, - 0, - 0, - 18064, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18067, - 0, - 0, - 0, - 18068, - 0, - 0, - 18075, - 0, - 0, - 18078, - 18093, - 18094, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18097, - 0, - 0, - 0, - 0, - 0, - 18098, - 18100, - 0, - 0, - 0, - 18108, - 0, - 18111, - 0, - 0, - 18112, - 0, - 18113, - 0, - 0, - 18115, - 18116, - 0, - 18118, - 0, - 0, - 0, - 0, - 18121, - 0, - 0, - 0, - 0, - 18123, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18124, - 0, - 0, - 0, - 0, - 18125, - 18126, - 0, - 18127, - 0, - 0, - 18128, - 18135, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18150, - 0, - 0, - 0, - 0, - 0, - 18151, - 18152, - 0, - 0, - 18156, - 18164, - 0, - 18166, - 18171, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18172, - 18183, - 0, - 18184, - 0, - 0, - 0, - 0, - 18185, - 0, - 18187, - 0, - 0, - 0, - 0, - 0, - 18188, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18189, - 0, - 0, - 18190, - 0, - 0, - 18191, - 18192, - 0, - 0, - 18194, - 18195, - 18196, - 0, - 0, - 0, - 18197, - 0, - 18203, - 0, - 18204, - 0, - 0, - 0, - 0, - 18205, - 0, - 0, - 0, - 18207, - 18208, - 0, - 0, - 18214, - 0, - 0, - 0, - 18215, - 18216, - 0, - 0, - 0, - 18220, - 0, - 0, - 18222, - 0, - 0, - 0, - 0, - 0, - 18223, - 0, - 18225, - 18231, - 0, - 18234, - 0, - 18235, - 0, - 0, - 0, - 0, - 18240, - 0, - 0, - 18241, - 18242, - 0, - 0, - 0, - 0, - 0, - 18243, - 18251, - 0, - 18253, - 0, - 18254, - 0, - 0, - 0, - 18266, - 0, - 0, - 0, - 0, - 0, - 0, - 18269, - 18270, - 18271, - 18273, - 18281, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18282, - 0, - 18283, - 0, - 18284, - 0, - 0, - 0, - 0, - 0, - 0, - 18285, - 0, - 18287, - 18289, - 0, - 0, - 18290, - 0, - 0, - 0, - 0, - 18308, - 0, - 0, - 0, - 18310, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18311, - 0, - 18312, - 18313, - 0, - 18315, - 0, - 0, - 18316, - 18320, - 0, - 18331, - 0, - 18332, - 0, - 18336, - 0, - 0, - 0, - 0, - 18337, - 0, - 18340, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18341, - 0, - 18344, - 18345, - 0, - 18346, - 0, - 0, - 0, - 0, - 0, - 18348, - 0, - 18351, - 0, - 0, - 18356, - 0, - 0, - 0, - 0, - 0, - 0, - 18357, - 0, - 0, - 0, - 0, - 0, - 18367, - 0, - 0, - 0, - 18368, - 0, - 18369, 
- 0, - 18370, - 18371, - 0, - 0, - 0, - 18437, - 18444, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18445, - 18450, - 0, - 0, - 0, - 0, - 18451, - 0, - 18452, - 0, - 0, - 0, - 18453, - 0, - 0, - 0, - 0, - 0, - 18455, - 0, - 0, - 0, - 18456, - 0, - 18457, - 0, - 18460, - 0, - 0, - 18461, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18466, - 0, - 0, - 18467, - 0, - 0, - 0, - 0, - 18473, - 0, - 0, - 0, - 18476, - 0, - 18477, - 0, - 0, - 0, - 18478, - 18479, - 18480, - 0, - 0, - 0, - 18485, - 0, - 0, - 0, - 18486, - 0, - 0, - 0, - 0, - 0, - 0, - 18488, - 18490, - 0, - 0, - 0, - 0, - 0, - 0, - 18491, - 0, - 0, - 0, - 0, - 0, - 18495, - 0, - 0, - 18496, - 0, - 0, - 0, - 0, - 0, - 0, - 18505, - 0, - 18521, - 0, - 18522, - 18523, - 0, - 0, - 0, - 18525, - 18526, - 0, - 0, - 0, - 0, - 0, - 18527, - 0, - 0, - 0, - 0, - 18532, - 18533, - 0, - 18534, - 0, - 0, - 0, - 0, - 0, - 0, - 18535, - 18537, - 0, - 18538, - 0, - 0, - 0, - 0, - 0, - 0, - 18540, - 18541, - 18542, - 18543, - 0, - 18546, - 0, - 0, - 0, - 0, - 18553, - 18556, - 0, - 0, - 18558, - 0, - 0, - 18569, - 18571, - 0, - 0, - 0, - 18572, - 0, - 18574, - 0, - 0, - 0, - 0, - 18586, - 0, - 0, - 0, - 0, - 0, - 18588, - 0, - 0, - 18589, - 0, - 0, - 0, - 0, - 0, - 0, - 18590, - 0, - 18592, - 0, - 0, - 0, - 0, - 18594, - 0, - 0, - 0, - 18596, - 0, - 0, - 18597, - 18598, - 0, - 0, - 18601, - 0, - 0, - 0, - 0, - 18602, - 0, - 0, - 0, - 18603, - 18604, - 0, - 18605, - 0, - 0, - 0, - 0, - 18608, - 0, - 0, - 18611, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18612, - 0, - 18616, - 0, - 0, - 18617, - 18619, - 0, - 0, - 0, - 18628, - 0, - 0, - 0, - 18629, - 0, - 0, - 18630, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18631, - 0, - 18632, - 0, - 0, - 18635, - 18637, - 0, - 0, - 0, - 0, - 0, - 0, - 18641, - 18643, - 18648, - 0, - 18652, - 0, - 0, - 18653, - 0, - 18655, - 18656, - 0, - 0, - 0, - 18657, - 0, - 0, - 18666, - 18674, - 0, - 0, - 0, - 0, - 18677, - 18684, - 18685, - 0, - 0, - 18686, - 0, - 0, - 18690, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18695, - 18696, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18697, - 0, - 0, - 18700, - 0, - 0, - 0, - 0, - 0, - 0, - 18702, - 0, - 18708, - 0, - 0, - 18709, - 0, - 18710, - 0, - 0, - 18711, - 0, - 18714, - 0, - 0, - 18718, - 0, - 0, - 0, - 0, - 0, - 0, - 18719, - 0, - 0, - 18722, - 0, - 18726, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18731, - 0, - 0, - 0, - 0, - 0, - 18739, - 18741, - 0, - 0, - 18742, - 0, - 18743, - 18744, - 18746, - 18748, - 0, - 18752, - 18753, - 0, - 0, - 18754, - 18763, - 0, - 18765, - 0, - 0, - 0, - 18766, - 0, - 0, - 0, - 18769, - 0, - 0, - 0, - 0, - 0, - 18773, - 18778, - 18779, - 18781, - 0, - 0, - 18784, - 18787, - 0, - 18788, - 0, - 18793, - 0, - 0, - 0, - 0, - 0, - 0, - 18795, - 0, - 0, - 18800, - 0, - 0, - 0, - 0, - 0, - 18801, - 18804, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18806, - 0, - 0, - 0, - 18811, - 18815, - 18816, - 0, - 0, - 0, - 0, - 18825, - 0, - 0, - 18827, - 18829, - 0, - 0, - 18830, - 0, - 0, - 0, - 0, - 18831, - 0, - 0, - 18832, - 0, - 0, - 0, - 0, - 18833, - 0, - 18840, - 0, - 18841, - 0, - 18842, - 0, - 0, - 0, - 0, - 18843, - 0, - 18844, - 0, - 0, - 0, - 0, - 0, - 0, - 18845, - 18846, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18848, - 0, - 0, - 0, - 18853, - 18860, - 0, - 0, - 18862, - 18866, - 0, - 0, - 18867, - 18869, - 0, - 0, - 18874, - 18881, - 18891, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18892, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
18895, - 0, - 18896, - 0, - 0, - 0, - 18900, - 0, - 0, - 0, - 18901, - 0, - 18902, - 18915, - 18916, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18919, - 0, - 0, - 0, - 0, - 0, - 18920, - 0, - 0, - 0, - 18921, - 18929, - 0, - 0, - 0, - 0, - 18930, - 0, - 0, - 0, - 0, - 0, - 0, - 18932, - 0, - 0, - 0, - 0, - 18934, - 18942, - 0, - 0, - 0, - 18951, - 18957, - 0, - 0, - 0, - 0, - 18958, - 0, - 0, - 0, - 0, - 18959, - 18960, - 0, - 0, - 18961, - 0, - 0, - 18962, - 0, - 0, - 0, - 0, - 18963, - 18964, - 0, - 0, - 0, - 18965, - 0, - 18967, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18968, - 0, - 18969, - 0, - 18970, - 18973, - 18976, - 0, - 0, - 0, - 0, - 0, - 0, - 18977, - 0, - 0, - 0, - 18981, - 0, - 0, - 0, - 18990, - 0, - 18998, - 0, - 0, - 0, - 0, - 0, - 18999, - 19003, - 0, - 0, - 19005, - 0, - 0, - 0, - 19006, - 0, - 0, - 0, - 0, - 0, - 0, - 19008, - 19011, - 0, - 0, - 19018, - 0, - 0, - 19019, - 0, - 19024, - 0, - 19031, - 19032, - 0, - 19039, - 0, - 19041, - 19050, - 0, - 0, - 0, - 19051, - 19055, - 19056, - 0, - 19059, - 19063, - 19064, - 0, - 0, - 19088, - 0, - 0, - 0, - 19093, - 19094, - 0, - 0, - 0, - 0, - 19095, - 0, - 19096, - 0, - 0, - 0, - 19097, - 0, - 0, - 19098, - 0, - 19099, - 19100, - 0, - 0, - 19103, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19111, - 0, - 0, - 0, - 0, - 0, - 0, - 19112, - 0, - 0, - 0, - 19116, - 19117, - 0, - 19121, - 19122, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19123, - 19124, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19125, - 19126, - 0, - 19128, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19129, - 19130, - 19131, - 19132, - 0, - 0, - 19146, - 0, - 0, - 19147, - 19156, - 19158, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19182, - 19185, - 0, - 0, - 19187, - 0, - 0, - 0, - 19193, - 0, - 0, - 0, - 0, - 0, - 19194, - 0, - 19197, - 0, - 0, - 0, - 0, - 19198, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19202, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19203, - 0, - 19205, - 19210, - 0, - 0, - 0, - 19213, - 0, - 19218, - 0, - 0, - 0, - 19223, - 19229, - 0, - 0, - 19230, - 0, - 0, - 19231, - 19232, - 19233, - 19239, - 0, - 0, - 0, - 0, - 0, - 19240, - 0, - 19248, - 19249, - 0, - 0, - 0, - 0, - 19254, - 0, - 19256, - 19258, - 19259, - 0, - 0, - 19261, - 0, - 19266, - 0, - 0, - 0, - 19272, - 0, - 19278, - 19281, - 19282, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19283, - 0, - 0, - 19284, - 0, - 0, - 19285, - 19287, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19288, - 19291, - 0, - 19292, - 0, - 0, - 0, - 0, - 19297, - 0, - 19298, - 0, - 0, - 0, - 0, - 19302, - 19303, - 0, - 0, - 0, - 0, - 19304, - 19305, - 0, - 0, - 0, - 0, - 19314, - 0, - 0, - 19315, - 0, - 0, - 19321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19322, - 0, - 19333, - 0, - 19334, - 19335, - 0, - 19336, - 19337, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19346, - 0, - 0, - 19353, - 0, - 19354, - 19362, - 0, - 19366, - 19367, - 0, - 0, - 19369, - 0, - 19375, - 0, - 19377, - 19380, - 19388, - 0, - 0, - 0, - 0, - 0, - 19389, - 19390, - 0, - 0, - 0, - 0, - 19392, - 0, - 0, - 0, - 0, - 0, - 19402, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19412, - 0, - 0, - 19413, - 19422, - 0, - 19424, - 0, - 0, - 0, - 19425, - 0, - 0, - 0, - 19428, - 0, - 0, - 0, - 0, - 19431, - 0, - 0, - 0, - 0, - 0, - 19432, - 0, - 0, - 0, - 0, - 0, - 19448, - 19459, - 0, - 0, - 19461, - 0, - 19462, - 19463, - 0, - 19467, 
- 19474, - 19482, - 0, - 0, - 0, - 0, - 19494, - 0, - 0, - 0, - 0, - 19501, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19502, - 19504, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19505, - 0, - 0, - 0, - 0, - 19506, - 19507, - 0, - 0, - 0, - 19508, - 0, - 0, - 19511, - 0, - 0, - 19514, - 0, - 19515, - 0, - 19516, - 0, - 19518, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19530, - 0, - 19537, - 19538, - 0, - 19543, - 19546, - 0, - 19547, - 19551, - 0, - 0, - 0, - 0, - 0, - 0, - 19552, - 19553, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19555, - 0, - 0, - 19556, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19560, - 19561, - 0, - 0, - 19562, - 0, - 0, - 0, - 0, - 0, - 0, - 19565, - 19567, - 0, - 19568, - 0, - 0, - 0, - 19569, - 19570, - 0, - 19578, - 0, - 0, - 0, - 0, - 19580, - 0, - 0, - 0, - 0, - 19581, - 19584, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19585, - 19586, - 0, - 0, - 0, - 19587, - 19588, - 0, - 19589, - 0, - 0, - 0, - 0, - 0, - 0, - 19592, - 19593, - 19599, - 0, - 19600, - 0, - 0, - 19604, - 0, - 0, - 19605, - 0, - 19606, - 19608, - 19610, - 0, - 19613, - 19614, - 0, - 0, - 0, - 0, - 0, - 0, - 19616, - 19617, - 0, - 0, - 19618, - 0, - 0, - 19619, - 0, - 0, - 0, - 19620, - 19621, - 19631, - 0, - 0, - 19632, - 19634, - 19636, - 0, - 19643, - 0, - 0, - 19644, - 19658, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19659, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19675, - 19677, - 0, - 0, - 0, - 0, - 19679, - 0, - 19683, - 0, - 19684, - 0, - 0, - 0, - 0, - 0, - 0, - 19687, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19688, - 19689, - 19692, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19695, - 19697, - 0, - 0, - 0, - 0, - 0, - 19698, - 19699, - 0, - 0, - 19700, - 0, - 19702, - 0, - 0, - 19703, - 0, - 0, - 0, - 0, - 0, - 0, - 19704, - 19708, - 0, - 19710, - 0, - 19713, - 0, - 0, - 0, - 19715, - 0, - 0, - 0, - 0, - 19718, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19720, - 0, - 19722, - 0, - 0, - 19725, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19730, - 0, - 0, - 0, - 0, - 0, - 19731, - 0, - 19734, - 19735, - 19739, - 0, - 0, - 19740, - 0, - 19741, - 0, - 0, - 0, - 19746, - 0, - 0, - 19747, - 0, - 19771, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19772, - 19775, - 0, - 0, - 0, - 0, - 0, - 0, - 19778, - 0, - 0, - 0, - 0, - 0, - 19779, - 0, - 0, - 19780, - 19790, - 0, - 19791, - 0, - 0, - 19792, - 0, - 0, - 0, - 19793, - 0, - 0, - 19796, - 19797, - 0, - 0, - 0, - 19799, - 0, - 0, - 0, - 19801, - 0, - 0, - 0, - 0, - 19803, - 0, - 19804, - 0, - 19805, - 0, - 0, - 19807, - 0, - 0, - 0, - 19808, - 0, - 0, - 0, - 0, - 0, - 0, - 19809, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19816, - 0, - 19821, - 0, - 19822, - 19830, - 19831, - 0, - 0, - 0, - 19833, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19838, - 0, - 0, - 0, - 0, - 19839, - 0, - 0, - 19843, - 0, - 0, - 0, - 0, - 19845, - 0, - 0, - 0, - 0, - 19847, - 0, - 0, - 19848, - 0, - 19849, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19851, - 0, - 0, - 0, - 19854, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19864, - 0, - 19865, - 0, - 19866, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19868, - 0, - 0, - 19870, - 0, - 0, - 19871, - 0, - 0, - 19872, - 19873, - 19875, - 0, - 19880, - 19882, - 19884, - 0, - 0, - 19885, - 19886, - 19888, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19890, - 19892, - 19893, - 0, - 0, - 19894, - 0, - 0, 
[… remainder of hunk: thousands of deleted lines from an auto-generated integer lookup table (entries interleaving 0 with values roughly 19895–30960), elided …]
0, - 30961, - 30962, - 30966, - 0, - 0, - 30969, - 30974, - 0, - 0, - 0, - 30976, - 0, - 0, - 30977, - 0, - 30978, - 30982, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30994, - 30995, - 30998, - 0, - 31000, - 0, - 0, - 31001, - 0, - 0, - 31003, - 31005, - 0, - 0, - 31006, - 31011, - 0, - 0, - 31014, - 0, - 31016, - 0, - 0, - 0, - 0, - 31018, - 0, - 0, - 31020, - 31023, - 31024, - 31025, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31027, - 31028, - 31029, - 0, - 0, - 0, - 0, - 0, - 0, - 31032, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31036, - 31037, - 31038, - 0, - 0, - 0, - 31041, - 31043, - 31045, - 0, - 31047, - 0, - 0, - 0, - 31048, - 0, - 31049, - 0, - 0, - 0, - 31053, - 31054, - 31055, - 0, - 0, - 31063, - 0, - 0, - 0, - 0, - 0, - 31066, - 0, - 31068, - 31071, - 0, - 0, - 0, - 31072, - 31073, - 0, - 0, - 0, - 0, - 31075, - 0, - 0, - 31076, - 0, - 0, - 0, - 31077, - 31079, - 0, - 31080, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31087, - 0, - 31142, - 0, - 31144, - 0, - 0, - 31145, - 31146, - 31147, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31149, - 0, - 31151, - 31152, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31162, - 31171, - 31174, - 31175, - 0, - 0, - 0, - 31176, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31179, - 0, - 0, - 0, - 31186, - 0, - 0, - 0, - 31192, - 31195, - 0, - 0, - 31196, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31198, - 0, - 0, - 0, - 0, - 0, - 31199, - 0, - 0, - 0, - 31205, - 0, - 0, - 0, - 0, - 31211, - 31215, - 0, - 0, - 0, - 0, - 31231, - 0, - 31232, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31233, - 31236, - 31253, - 0, - 31254, - 0, - 0, - 0, - 0, - 0, - 0, - 31255, - 0, - 0, - 31257, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31258, - 31259, - 0, - 0, - 31260, - 0, - 31261, - 0, - 0, - 0, - 0, - 0, - 31262, - 31263, - 0, - 0, - 31264, - 0, - 31266, - 0, - 31267, - 0, - 0, - 0, - 0, - 0, - 31281, - 0, - 31282, - 0, - 31284, - 0, - 0, - 31285, - 31287, - 31288, - 0, - 0, - 31290, - 0, - 0, - 0, - 31292, - 31295, - 0, - 31299, - 0, - 31300, - 0, - 0, - 0, - 0, - 0, - 31302, - 0, - 0, - 0, - 0, - 31303, - 0, - 0, - 0, - 0, - 0, - 0, - 31304, - 0, - 0, - 0, - 0, - 0, - 31305, - 31308, - 31309, - 31315, - 0, - 31317, - 0, - 0, - 0, - 0, - 0, - 31323, - 0, - 31324, - 0, - 0, - 0, - 0, - 0, - 31325, - 31327, - 0, - 0, - 31331, - 0, - 0, - 0, - 0, - 0, - 31333, - 0, - 0, - 0, - 0, - 0, - 31336, - 0, - 0, - 31337, - 0, - 0, - 0, - 0, - 0, - 0, - 31338, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31339, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31342, - 0, - 0, - 0, - 0, - 31345, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31347, - 0, - 0, - 0, - 0, - 0, - 0, - 31348, - 0, - 0, - 31350, - 31351, - 0, - 31352, - 0, - 0, - 31354, - 0, - 0, - 0, - 0, - 31355, - 0, - 0, - 31356, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31363, - 0, - 31372, - 0, - 0, - 31373, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31376, - 0, - 31388, - 0, - 31389, - 0, - 31392, - 0, - 31401, - 0, - 31405, - 31407, - 31408, - 0, - 31409, - 0, - 0, - 0, - 0, - 0, - 0, - 31413, - 31415, - 0, - 0, - 0, - 31416, - 31418, - 0, - 0, - 0, - 0, - 0, - 0, - 31422, - 31423, - 0, - 0, - 31424, - 0, - 31425, - 31432, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31433, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31434, - 0, - 0, - 0, - 0, - 0, - 0, - 31435, - 0, - 0, - 0, - 0, - 31438, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31442, - 0, - 31444, - 0, - 31448, - 
0, - 0, - 31451, - 0, - 0, - 0, - 0, - 31452, - 0, - 31461, - 31465, - 0, - 0, - 31466, - 0, - 0, - 31467, - 0, - 0, - 31468, - 0, - 0, - 0, - 31469, - 31473, - 0, - 31476, - 0, - 0, - 0, - 0, - 31489, - 31490, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31492, - 31493, - 31494, - 0, - 0, - 0, - 0, - 31501, - 31504, - 31505, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31509, - 0, - 0, - 0, - 0, - 31510, - 0, - 0, - 31511, - 0, - 0, - 31513, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31514, - 0, - 31522, - 31536, - 31539, - 31540, - 0, - 31541, - 0, - 0, - 0, - 0, - 0, - 0, - 31546, - 31553, - 31559, - 0, - 0, - 0, - 31560, - 31561, - 31562, - 0, - 0, - 31564, - 31567, - 0, - 31569, - 0, - 0, - 0, - 31570, - 0, - 0, - 0, - 0, - 31571, - 0, - 0, - 0, - 0, - 0, - 0, - 31572, - 31574, - 31580, - 31581, - 0, - 0, - 31582, - 31584, - 31585, - 31586, - 31595, - 0, - 31596, - 0, - 0, - 0, - 0, - 31597, - 0, - 31599, - 0, - 31600, - 31601, - 0, - 0, - 31603, - 31604, - 0, - 0, - 31608, - 31610, - 0, - 0, - 0, - 31611, - 0, - 31615, - 0, - 0, - 0, - 0, - 31616, - 0, - 0, - 0, - 0, - 0, - 0, - 31617, - 0, - 0, - 0, - 0, - 0, - 31618, - 0, - 0, - 0, - 0, - 0, - 0, - 31621, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31622, - 31625, - 0, - 0, - 0, - 0, - 31627, - 0, - 31641, - 0, - 0, - 31642, - 0, - 0, - 31643, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31644, - 0, - 31646, - 0, - 0, - 0, - 0, - 31648, - 0, - 0, - 0, - 31652, - 0, - 0, - 0, - 31657, - 0, - 0, - 31676, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31689, - 31691, - 31692, - 0, - 31694, - 0, - 0, - 0, - 31696, - 0, - 31702, - 0, - 31703, - 0, -} - -var kStaticDictionaryWords = [31705]dictWord{ - dictWord{0, 0, 0}, - dictWord{8, 0, 1002}, - dictWord{136, 0, 1015}, - dictWord{4, 0, 683}, - dictWord{4, 10, 325}, - dictWord{138, 10, 125}, - dictWord{7, 11, 572}, - dictWord{ - 9, - 11, - 592, - }, - dictWord{11, 11, 680}, - dictWord{11, 11, 842}, - dictWord{11, 11, 924}, - dictWord{12, 11, 356}, - dictWord{12, 11, 550}, - dictWord{13, 11, 317}, - dictWord{13, 11, 370}, - dictWord{13, 11, 469}, - dictWord{13, 11, 471}, - dictWord{14, 11, 397}, - dictWord{18, 11, 69}, - dictWord{146, 11, 145}, - dictWord{ - 134, - 0, - 1265, - }, - dictWord{136, 11, 534}, - dictWord{134, 0, 1431}, - dictWord{11, 0, 138}, - dictWord{140, 0, 40}, - dictWord{4, 0, 155}, - dictWord{7, 0, 1689}, - dictWord{ - 4, - 10, - 718, - }, - dictWord{135, 10, 1216}, - dictWord{4, 0, 245}, - dictWord{5, 0, 151}, - dictWord{5, 0, 741}, - dictWord{6, 0, 1147}, - dictWord{7, 0, 498}, - dictWord{7, 0, 870}, - dictWord{7, 0, 1542}, - dictWord{12, 0, 213}, - dictWord{14, 0, 36}, - dictWord{14, 0, 391}, - dictWord{17, 0, 111}, - dictWord{18, 0, 6}, - dictWord{18, 0, 46}, - dictWord{ - 18, - 0, - 151, - }, - dictWord{19, 0, 36}, - dictWord{20, 0, 32}, - dictWord{20, 0, 56}, - dictWord{20, 0, 69}, - dictWord{20, 0, 102}, - dictWord{21, 0, 4}, - dictWord{22, 0, 8}, - dictWord{ - 22, - 0, - 10, - }, - dictWord{22, 0, 14}, - dictWord{150, 0, 31}, - dictWord{4, 0, 624}, - dictWord{135, 0, 1752}, - dictWord{5, 10, 124}, - dictWord{5, 10, 144}, - dictWord{6, 10, 548}, - dictWord{7, 10, 15}, - dictWord{7, 10, 153}, - dictWord{137, 10, 629}, - dictWord{6, 0, 503}, - dictWord{9, 0, 586}, - dictWord{13, 0, 468}, - dictWord{14, 0, 66}, - dictWord{ - 16, - 0, - 58, - }, - dictWord{7, 10, 1531}, - dictWord{8, 10, 416}, - dictWord{9, 10, 275}, - dictWord{10, 10, 100}, - dictWord{11, 10, 658}, - dictWord{11, 10, 979}, - dictWord{ - 12, - 10, - 86, - }, - dictWord{14, 10, 207}, - dictWord{15, 10, 20}, 
- dictWord{143, 10, 25}, - dictWord{5, 0, 603}, - dictWord{7, 0, 1212}, - dictWord{9, 0, 565}, - dictWord{ - 14, - 0, - 301, - }, - dictWord{5, 10, 915}, - dictWord{6, 10, 1783}, - dictWord{7, 10, 211}, - dictWord{7, 10, 1353}, - dictWord{9, 10, 83}, - dictWord{10, 10, 376}, - dictWord{ - 10, - 10, - 431, - }, - dictWord{11, 10, 543}, - dictWord{12, 10, 664}, - dictWord{13, 10, 280}, - dictWord{13, 10, 428}, - dictWord{14, 10, 128}, - dictWord{17, 10, 52}, - dictWord{ - 145, - 10, - 81, - }, - dictWord{4, 0, 492}, - dictWord{133, 0, 451}, - dictWord{135, 0, 835}, - dictWord{141, 0, 70}, - dictWord{132, 0, 539}, - dictWord{7, 11, 748}, - dictWord{ - 139, - 11, - 700, - }, - dictWord{7, 11, 1517}, - dictWord{11, 11, 597}, - dictWord{14, 11, 76}, - dictWord{14, 11, 335}, - dictWord{148, 11, 33}, - dictWord{6, 0, 113}, - dictWord{135, 0, 436}, - dictWord{4, 10, 338}, - dictWord{133, 10, 400}, - dictWord{136, 0, 718}, - dictWord{133, 11, 127}, - dictWord{133, 11, 418}, - dictWord{ - 6, - 0, - 1505, - }, - dictWord{7, 0, 520}, - dictWord{6, 11, 198}, - dictWord{11, 10, 892}, - dictWord{140, 11, 83}, - dictWord{4, 10, 221}, - dictWord{5, 10, 659}, - dictWord{ - 5, - 10, - 989, - }, - dictWord{7, 10, 697}, - dictWord{7, 10, 1211}, - dictWord{138, 10, 284}, - dictWord{135, 0, 1070}, - dictWord{5, 11, 276}, - dictWord{6, 11, 55}, - dictWord{ - 135, - 11, - 1369, - }, - dictWord{134, 0, 1515}, - dictWord{6, 11, 1752}, - dictWord{136, 11, 726}, - dictWord{138, 10, 507}, - dictWord{15, 0, 78}, - dictWord{4, 10, 188}, - dictWord{135, 10, 805}, - dictWord{5, 10, 884}, - dictWord{139, 10, 991}, - dictWord{133, 11, 764}, - dictWord{134, 10, 1653}, - dictWord{6, 11, 309}, - dictWord{ - 7, - 11, - 331, - }, - dictWord{138, 11, 550}, - dictWord{135, 11, 1861}, - dictWord{132, 11, 348}, - dictWord{135, 11, 986}, - dictWord{135, 11, 1573}, - dictWord{ - 12, - 0, - 610, - }, - dictWord{13, 0, 431}, - dictWord{144, 0, 59}, - dictWord{9, 11, 799}, - dictWord{140, 10, 166}, - dictWord{134, 0, 1530}, - dictWord{132, 0, 750}, - dictWord{132, 0, 307}, - dictWord{133, 0, 964}, - dictWord{6, 11, 194}, - dictWord{7, 11, 133}, - dictWord{10, 11, 493}, - dictWord{10, 11, 570}, - dictWord{139, 11, 664}, - dictWord{5, 11, 24}, - dictWord{5, 11, 569}, - dictWord{6, 11, 3}, - dictWord{6, 11, 119}, - dictWord{6, 11, 143}, - dictWord{6, 11, 440}, - dictWord{7, 11, 295}, - dictWord{ - 7, - 11, - 599, - }, - dictWord{7, 11, 1686}, - dictWord{7, 11, 1854}, - dictWord{8, 11, 424}, - dictWord{9, 11, 43}, - dictWord{9, 11, 584}, - dictWord{9, 11, 760}, - dictWord{ - 10, - 11, - 148, - }, - dictWord{10, 11, 328}, - dictWord{11, 11, 159}, - dictWord{11, 11, 253}, - dictWord{11, 11, 506}, - dictWord{12, 11, 487}, - dictWord{12, 11, 531}, - dictWord{144, 11, 33}, - dictWord{136, 10, 760}, - dictWord{5, 11, 14}, - dictWord{5, 11, 892}, - dictWord{6, 11, 283}, - dictWord{7, 11, 234}, - dictWord{136, 11, 537}, - dictWord{135, 11, 1251}, - dictWord{4, 11, 126}, - dictWord{8, 11, 635}, - dictWord{147, 11, 34}, - dictWord{4, 11, 316}, - dictWord{135, 11, 1561}, - dictWord{ - 6, - 0, - 999, - }, - dictWord{6, 0, 1310}, - dictWord{137, 11, 861}, - dictWord{4, 11, 64}, - dictWord{5, 11, 352}, - dictWord{5, 11, 720}, - dictWord{6, 11, 368}, - dictWord{ - 139, - 11, - 359, - }, - dictWord{4, 0, 75}, - dictWord{5, 0, 180}, - dictWord{6, 0, 500}, - dictWord{7, 0, 58}, - dictWord{7, 0, 710}, - dictWord{10, 0, 645}, - dictWord{136, 10, 770}, - dictWord{133, 0, 649}, - dictWord{6, 0, 276}, - dictWord{7, 0, 282}, - dictWord{7, 0, 879}, - dictWord{7, 0, 
924}, - dictWord{8, 0, 459}, - dictWord{9, 0, 599}, - dictWord{9, 0, 754}, - dictWord{11, 0, 574}, - dictWord{12, 0, 128}, - dictWord{12, 0, 494}, - dictWord{13, 0, 52}, - dictWord{13, 0, 301}, - dictWord{15, 0, 30}, - dictWord{143, 0, 132}, - dictWord{132, 0, 200}, - dictWord{4, 10, 89}, - dictWord{5, 10, 489}, - dictWord{6, 10, 315}, - dictWord{7, 10, 553}, - dictWord{7, 10, 1745}, - dictWord{138, 10, 243}, - dictWord{135, 11, 1050}, - dictWord{7, 0, 1621}, - dictWord{6, 10, 1658}, - dictWord{9, 10, 3}, - dictWord{10, 10, 154}, - dictWord{11, 10, 641}, - dictWord{13, 10, 85}, - dictWord{13, 10, 201}, - dictWord{141, 10, 346}, - dictWord{6, 11, 175}, - dictWord{137, 11, 289}, - dictWord{5, 11, 432}, - dictWord{133, 11, 913}, - dictWord{ - 6, - 0, - 225, - }, - dictWord{137, 0, 211}, - dictWord{7, 0, 718}, - dictWord{8, 0, 687}, - dictWord{139, 0, 374}, - dictWord{4, 10, 166}, - dictWord{133, 10, 505}, - dictWord{ - 9, - 0, - 110, - }, - dictWord{134, 10, 1670}, - dictWord{8, 0, 58}, - dictWord{9, 0, 724}, - dictWord{11, 0, 809}, - dictWord{13, 0, 113}, - dictWord{145, 0, 72}, - dictWord{6, 0, 345}, - dictWord{7, 0, 1247}, - dictWord{144, 11, 82}, - dictWord{5, 11, 931}, - dictWord{134, 11, 1698}, - dictWord{8, 0, 767}, - dictWord{8, 0, 803}, - dictWord{9, 0, 301}, - dictWord{137, 0, 903}, - dictWord{139, 0, 203}, - dictWord{134, 0, 1154}, - dictWord{7, 0, 1949}, - dictWord{136, 0, 674}, - dictWord{134, 0, 259}, - dictWord{ - 135, - 0, - 1275, - }, - dictWord{5, 11, 774}, - dictWord{6, 11, 1637}, - dictWord{6, 11, 1686}, - dictWord{134, 11, 1751}, - dictWord{134, 0, 1231}, - dictWord{7, 10, 445}, - dictWord{8, 10, 307}, - dictWord{8, 10, 704}, - dictWord{10, 10, 41}, - dictWord{10, 10, 439}, - dictWord{11, 10, 237}, - dictWord{11, 10, 622}, - dictWord{140, 10, 201}, - dictWord{136, 0, 254}, - dictWord{6, 11, 260}, - dictWord{135, 11, 1484}, - dictWord{139, 0, 277}, - dictWord{135, 10, 1977}, - dictWord{4, 10, 189}, - dictWord{ - 5, - 10, - 713, - }, - dictWord{6, 11, 573}, - dictWord{136, 10, 57}, - dictWord{138, 10, 371}, - dictWord{132, 10, 552}, - dictWord{134, 11, 344}, - dictWord{133, 0, 248}, - dictWord{9, 0, 800}, - dictWord{10, 0, 693}, - dictWord{11, 0, 482}, - dictWord{11, 0, 734}, - dictWord{11, 0, 789}, - dictWord{134, 11, 240}, - dictWord{4, 0, 116}, - dictWord{ - 5, - 0, - 95, - }, - dictWord{5, 0, 445}, - dictWord{7, 0, 1688}, - dictWord{8, 0, 29}, - dictWord{9, 0, 272}, - dictWord{11, 0, 509}, - dictWord{11, 0, 915}, - dictWord{4, 11, 292}, - dictWord{4, 11, 736}, - dictWord{5, 11, 871}, - dictWord{6, 11, 171}, - dictWord{6, 11, 1689}, - dictWord{7, 11, 1324}, - dictWord{7, 11, 1944}, - dictWord{9, 11, 415}, - dictWord{9, 11, 580}, - dictWord{14, 11, 230}, - dictWord{146, 11, 68}, - dictWord{7, 0, 490}, - dictWord{13, 0, 100}, - dictWord{143, 0, 75}, - dictWord{135, 0, 1641}, - dictWord{133, 0, 543}, - dictWord{7, 11, 209}, - dictWord{8, 11, 661}, - dictWord{10, 11, 42}, - dictWord{11, 11, 58}, - dictWord{12, 11, 58}, - dictWord{12, 11, 118}, - dictWord{141, 11, 32}, - dictWord{5, 0, 181}, - dictWord{8, 0, 41}, - dictWord{6, 11, 63}, - dictWord{135, 11, 920}, - dictWord{133, 0, 657}, - dictWord{133, 11, 793}, - dictWord{138, 0, 709}, - dictWord{7, 0, 25}, - dictWord{8, 0, 202}, - dictWord{138, 0, 536}, - dictWord{5, 11, 665}, - dictWord{135, 10, 1788}, - dictWord{145, 10, 49}, - dictWord{9, 0, 423}, - dictWord{140, 0, 89}, - dictWord{5, 11, 67}, - dictWord{6, 11, 62}, - dictWord{6, 11, 374}, - dictWord{135, 11, 1391}, - dictWord{8, 0, 113}, - dictWord{ - 9, - 0, - 877, - }, 
- dictWord{10, 0, 554}, - dictWord{11, 0, 83}, - dictWord{12, 0, 136}, - dictWord{19, 0, 109}, - dictWord{9, 11, 790}, - dictWord{140, 11, 47}, - dictWord{ - 138, - 10, - 661, - }, - dictWord{4, 0, 963}, - dictWord{10, 0, 927}, - dictWord{14, 0, 442}, - dictWord{135, 10, 1945}, - dictWord{133, 0, 976}, - dictWord{132, 0, 206}, - dictWord{ - 4, - 11, - 391, - }, - dictWord{135, 11, 1169}, - dictWord{134, 0, 2002}, - dictWord{6, 0, 696}, - dictWord{134, 0, 1008}, - dictWord{134, 0, 1170}, - dictWord{132, 11, 271}, - dictWord{7, 0, 13}, - dictWord{8, 0, 226}, - dictWord{10, 0, 537}, - dictWord{11, 0, 570}, - dictWord{11, 0, 605}, - dictWord{11, 0, 799}, - dictWord{11, 0, 804}, - dictWord{ - 12, - 0, - 85, - }, - dictWord{12, 0, 516}, - dictWord{12, 0, 623}, - dictWord{13, 0, 112}, - dictWord{13, 0, 361}, - dictWord{14, 0, 77}, - dictWord{14, 0, 78}, - dictWord{17, 0, 28}, - dictWord{19, 0, 110}, - dictWord{140, 11, 314}, - dictWord{132, 0, 769}, - dictWord{134, 0, 1544}, - dictWord{4, 0, 551}, - dictWord{137, 0, 678}, - dictWord{5, 10, 84}, - dictWord{134, 10, 163}, - dictWord{9, 0, 57}, - dictWord{9, 0, 459}, - dictWord{10, 0, 425}, - dictWord{11, 0, 119}, - dictWord{12, 0, 184}, - dictWord{12, 0, 371}, - dictWord{ - 13, - 0, - 358, - }, - dictWord{145, 0, 51}, - dictWord{5, 0, 188}, - dictWord{5, 0, 814}, - dictWord{8, 0, 10}, - dictWord{9, 0, 421}, - dictWord{9, 0, 729}, - dictWord{10, 0, 609}, - dictWord{11, 0, 689}, - dictWord{4, 11, 253}, - dictWord{5, 10, 410}, - dictWord{5, 11, 544}, - dictWord{7, 11, 300}, - dictWord{137, 11, 340}, - dictWord{134, 0, 624}, - dictWord{138, 11, 321}, - dictWord{135, 0, 1941}, - dictWord{18, 0, 130}, - dictWord{5, 10, 322}, - dictWord{8, 10, 186}, - dictWord{9, 10, 262}, - dictWord{10, 10, 187}, - dictWord{142, 10, 208}, - dictWord{5, 11, 53}, - dictWord{5, 11, 541}, - dictWord{6, 11, 94}, - dictWord{6, 11, 499}, - dictWord{7, 11, 230}, - dictWord{139, 11, 321}, - dictWord{133, 10, 227}, - dictWord{4, 0, 378}, - dictWord{4, 11, 920}, - dictWord{5, 11, 25}, - dictWord{5, 11, 790}, - dictWord{6, 11, 457}, - dictWord{135, 11, 853}, - dictWord{137, 0, 269}, - dictWord{132, 0, 528}, - dictWord{134, 0, 1146}, - dictWord{7, 10, 1395}, - dictWord{8, 10, 486}, - dictWord{9, 10, 236}, - dictWord{9, 10, 878}, - dictWord{10, 10, 218}, - dictWord{11, 10, 95}, - dictWord{19, 10, 17}, - dictWord{147, 10, 31}, - dictWord{7, 10, 2043}, - dictWord{8, 10, 672}, - dictWord{ - 141, - 10, - 448, - }, - dictWord{134, 0, 1105}, - dictWord{134, 0, 1616}, - dictWord{134, 11, 1765}, - dictWord{140, 11, 163}, - dictWord{5, 10, 412}, - dictWord{133, 11, 822}, - dictWord{132, 11, 634}, - dictWord{6, 0, 656}, - dictWord{134, 11, 1730}, - dictWord{134, 0, 1940}, - dictWord{5, 0, 104}, - dictWord{6, 0, 173}, - dictWord{ - 135, - 0, - 1631, - }, - dictWord{136, 10, 562}, - dictWord{6, 11, 36}, - dictWord{7, 11, 658}, - dictWord{8, 11, 454}, - dictWord{147, 11, 86}, - dictWord{5, 0, 457}, - dictWord{ - 134, - 10, - 1771, - }, - dictWord{7, 0, 810}, - dictWord{8, 0, 138}, - dictWord{8, 0, 342}, - dictWord{9, 0, 84}, - dictWord{10, 0, 193}, - dictWord{11, 0, 883}, - dictWord{140, 0, 359}, - dictWord{9, 0, 620}, - dictWord{135, 10, 1190}, - dictWord{137, 10, 132}, - dictWord{7, 11, 975}, - dictWord{137, 11, 789}, - dictWord{6, 0, 95}, - dictWord{6, 0, 1934}, - dictWord{136, 0, 967}, - dictWord{141, 11, 335}, - dictWord{6, 0, 406}, - dictWord{10, 0, 409}, - dictWord{10, 0, 447}, - dictWord{11, 0, 44}, - dictWord{140, 0, 100}, - dictWord{4, 10, 317}, - dictWord{135, 10, 1279}, - dictWord{132, 
0, 477}, - dictWord{134, 0, 1268}, - dictWord{6, 0, 1941}, - dictWord{8, 0, 944}, - dictWord{5, 10, 63}, - dictWord{133, 10, 509}, - dictWord{132, 0, 629}, - dictWord{132, 11, 104}, - dictWord{4, 0, 246}, - dictWord{133, 0, 375}, - dictWord{6, 0, 1636}, - dictWord{ - 132, - 10, - 288, - }, - dictWord{135, 11, 1614}, - dictWord{9, 0, 49}, - dictWord{10, 0, 774}, - dictWord{8, 10, 89}, - dictWord{8, 10, 620}, - dictWord{11, 10, 628}, - dictWord{ - 12, - 10, - 322, - }, - dictWord{143, 10, 124}, - dictWord{4, 0, 282}, - dictWord{7, 0, 1034}, - dictWord{11, 0, 398}, - dictWord{11, 0, 634}, - dictWord{12, 0, 1}, - dictWord{12, 0, 79}, - dictWord{12, 0, 544}, - dictWord{14, 0, 237}, - dictWord{17, 0, 10}, - dictWord{146, 0, 20}, - dictWord{132, 0, 824}, - dictWord{7, 11, 45}, - dictWord{9, 11, 542}, - dictWord{ - 9, - 11, - 566, - }, - dictWord{138, 11, 728}, - dictWord{5, 0, 118}, - dictWord{5, 0, 499}, - dictWord{6, 0, 476}, - dictWord{6, 0, 665}, - dictWord{6, 0, 1176}, - dictWord{ - 6, - 0, - 1196, - }, - dictWord{7, 0, 600}, - dictWord{7, 0, 888}, - dictWord{135, 0, 1096}, - dictWord{7, 0, 296}, - dictWord{7, 0, 596}, - dictWord{8, 0, 560}, - dictWord{8, 0, 586}, - dictWord{9, 0, 612}, - dictWord{11, 0, 304}, - dictWord{12, 0, 46}, - dictWord{13, 0, 89}, - dictWord{14, 0, 112}, - dictWord{145, 0, 122}, - dictWord{5, 0, 894}, - dictWord{ - 6, - 0, - 1772, - }, - dictWord{9, 0, 1009}, - dictWord{138, 10, 120}, - dictWord{5, 11, 533}, - dictWord{7, 11, 755}, - dictWord{138, 11, 780}, - dictWord{151, 10, 1}, - dictWord{ - 6, - 0, - 1474, - }, - dictWord{7, 11, 87}, - dictWord{142, 11, 288}, - dictWord{139, 0, 366}, - dictWord{137, 10, 461}, - dictWord{7, 11, 988}, - dictWord{7, 11, 1939}, - dictWord{ - 9, - 11, - 64, - }, - dictWord{9, 11, 502}, - dictWord{12, 11, 7}, - dictWord{12, 11, 34}, - dictWord{13, 11, 12}, - dictWord{13, 11, 234}, - dictWord{147, 11, 77}, - dictWord{ - 7, - 0, - 1599, - }, - dictWord{7, 0, 1723}, - dictWord{8, 0, 79}, - dictWord{8, 0, 106}, - dictWord{8, 0, 190}, - dictWord{8, 0, 302}, - dictWord{8, 0, 383}, - dictWord{8, 0, 713}, - dictWord{ - 9, - 0, - 119, - }, - dictWord{9, 0, 233}, - dictWord{9, 0, 419}, - dictWord{9, 0, 471}, - dictWord{10, 0, 181}, - dictWord{10, 0, 406}, - dictWord{11, 0, 57}, - dictWord{11, 0, 85}, - dictWord{11, 0, 120}, - dictWord{11, 0, 177}, - dictWord{11, 0, 296}, - dictWord{11, 0, 382}, - dictWord{11, 0, 454}, - dictWord{11, 0, 758}, - dictWord{11, 0, 999}, - dictWord{ - 12, - 0, - 27, - }, - dictWord{12, 0, 98}, - dictWord{12, 0, 131}, - dictWord{12, 0, 245}, - dictWord{12, 0, 312}, - dictWord{12, 0, 446}, - dictWord{12, 0, 454}, - dictWord{13, 0, 25}, - dictWord{13, 0, 98}, - dictWord{13, 0, 426}, - dictWord{13, 0, 508}, - dictWord{14, 0, 70}, - dictWord{14, 0, 163}, - dictWord{14, 0, 272}, - dictWord{14, 0, 277}, - dictWord{ - 14, - 0, - 370, - }, - dictWord{15, 0, 95}, - dictWord{15, 0, 138}, - dictWord{15, 0, 167}, - dictWord{17, 0, 38}, - dictWord{148, 0, 96}, - dictWord{135, 10, 1346}, - dictWord{ - 10, - 0, - 200, - }, - dictWord{19, 0, 2}, - dictWord{151, 0, 22}, - dictWord{135, 11, 141}, - dictWord{134, 10, 85}, - dictWord{134, 0, 1759}, - dictWord{138, 0, 372}, - dictWord{ - 145, - 0, - 16, - }, - dictWord{8, 0, 943}, - dictWord{132, 11, 619}, - dictWord{139, 11, 88}, - dictWord{5, 11, 246}, - dictWord{8, 11, 189}, - dictWord{9, 11, 355}, - dictWord{ - 9, - 11, - 512, - }, - dictWord{10, 11, 124}, - dictWord{10, 11, 453}, - dictWord{11, 11, 143}, - dictWord{11, 11, 416}, - dictWord{11, 11, 859}, - dictWord{141, 11, 341}, - 
dictWord{ - 5, - 0, - 258, - }, - dictWord{134, 0, 719}, - dictWord{6, 0, 1798}, - dictWord{6, 0, 1839}, - dictWord{8, 0, 900}, - dictWord{10, 0, 874}, - dictWord{10, 0, 886}, - dictWord{ - 12, - 0, - 698, - }, - dictWord{12, 0, 732}, - dictWord{12, 0, 770}, - dictWord{16, 0, 106}, - dictWord{18, 0, 163}, - dictWord{18, 0, 170}, - dictWord{18, 0, 171}, - dictWord{152, 0, 20}, - dictWord{9, 0, 707}, - dictWord{11, 0, 326}, - dictWord{11, 0, 339}, - dictWord{12, 0, 423}, - dictWord{12, 0, 502}, - dictWord{20, 0, 62}, - dictWord{9, 11, 707}, - dictWord{ - 11, - 11, - 326, - }, - dictWord{11, 11, 339}, - dictWord{12, 11, 423}, - dictWord{12, 11, 502}, - dictWord{148, 11, 62}, - dictWord{5, 0, 30}, - dictWord{7, 0, 495}, - dictWord{ - 8, - 0, - 134, - }, - dictWord{9, 0, 788}, - dictWord{140, 0, 438}, - dictWord{133, 11, 678}, - dictWord{5, 10, 279}, - dictWord{6, 10, 235}, - dictWord{7, 10, 468}, - dictWord{ - 8, - 10, - 446, - }, - dictWord{9, 10, 637}, - dictWord{10, 10, 717}, - dictWord{11, 10, 738}, - dictWord{140, 10, 514}, - dictWord{5, 11, 35}, - dictWord{6, 11, 287}, - dictWord{ - 7, - 11, - 862, - }, - dictWord{7, 11, 1886}, - dictWord{138, 11, 179}, - dictWord{7, 0, 1948}, - dictWord{7, 0, 2004}, - dictWord{132, 11, 517}, - dictWord{5, 10, 17}, - dictWord{ - 6, - 10, - 371, - }, - dictWord{137, 10, 528}, - dictWord{4, 0, 115}, - dictWord{5, 0, 669}, - dictWord{6, 0, 407}, - dictWord{8, 0, 311}, - dictWord{11, 0, 10}, - dictWord{141, 0, 5}, - dictWord{137, 0, 381}, - dictWord{5, 0, 50}, - dictWord{6, 0, 439}, - dictWord{7, 0, 780}, - dictWord{135, 0, 1040}, - dictWord{136, 11, 667}, - dictWord{11, 11, 403}, - dictWord{146, 11, 83}, - dictWord{5, 0, 1}, - dictWord{6, 0, 81}, - dictWord{138, 0, 520}, - dictWord{134, 0, 738}, - dictWord{5, 0, 482}, - dictWord{8, 0, 98}, - dictWord{9, 0, 172}, - dictWord{10, 0, 360}, - dictWord{10, 0, 700}, - dictWord{10, 0, 822}, - dictWord{11, 0, 302}, - dictWord{11, 0, 778}, - dictWord{12, 0, 50}, - dictWord{12, 0, 127}, - dictWord{ - 12, - 0, - 396, - }, - dictWord{13, 0, 62}, - dictWord{13, 0, 328}, - dictWord{14, 0, 122}, - dictWord{147, 0, 72}, - dictWord{9, 11, 157}, - dictWord{10, 11, 131}, - dictWord{ - 140, - 11, - 72, - }, - dictWord{135, 11, 714}, - dictWord{135, 11, 539}, - dictWord{5, 0, 2}, - dictWord{6, 0, 512}, - dictWord{7, 0, 797}, - dictWord{7, 0, 1494}, - dictWord{8, 0, 253}, - dictWord{8, 0, 589}, - dictWord{9, 0, 77}, - dictWord{10, 0, 1}, - dictWord{10, 0, 129}, - dictWord{10, 0, 225}, - dictWord{11, 0, 118}, - dictWord{11, 0, 226}, - dictWord{ - 11, - 0, - 251, - }, - dictWord{11, 0, 430}, - dictWord{11, 0, 701}, - dictWord{11, 0, 974}, - dictWord{11, 0, 982}, - dictWord{12, 0, 64}, - dictWord{12, 0, 260}, - dictWord{12, 0, 488}, - dictWord{140, 0, 690}, - dictWord{5, 11, 394}, - dictWord{7, 11, 367}, - dictWord{7, 11, 487}, - dictWord{7, 11, 857}, - dictWord{7, 11, 1713}, - dictWord{8, 11, 246}, - dictWord{9, 11, 537}, - dictWord{10, 11, 165}, - dictWord{12, 11, 219}, - dictWord{140, 11, 561}, - dictWord{136, 0, 557}, - dictWord{5, 10, 779}, - dictWord{5, 10, 807}, - dictWord{6, 10, 1655}, - dictWord{134, 10, 1676}, - dictWord{4, 10, 196}, - dictWord{5, 10, 558}, - dictWord{133, 10, 949}, - dictWord{11, 11, 827}, - dictWord{ - 12, - 11, - 56, - }, - dictWord{14, 11, 34}, - dictWord{143, 11, 148}, - dictWord{137, 0, 347}, - dictWord{133, 0, 572}, - dictWord{134, 0, 832}, - dictWord{4, 0, 12}, - dictWord{ - 7, - 0, - 504, - }, - dictWord{7, 0, 522}, - dictWord{7, 0, 809}, - dictWord{8, 0, 797}, - dictWord{141, 0, 88}, - 
dictWord{4, 10, 752}, - dictWord{133, 11, 449}, - dictWord{7, 11, 86}, - dictWord{8, 11, 103}, - dictWord{145, 11, 69}, - dictWord{7, 11, 2028}, - dictWord{138, 11, 641}, - dictWord{5, 0, 528}, - dictWord{6, 11, 1}, - dictWord{142, 11, 2}, - dictWord{134, 0, 861}, - dictWord{10, 0, 294}, - dictWord{4, 10, 227}, - dictWord{5, 10, 159}, - dictWord{5, 10, 409}, - dictWord{7, 10, 80}, - dictWord{10, 10, 479}, - dictWord{ - 12, - 10, - 418, - }, - dictWord{14, 10, 50}, - dictWord{14, 10, 249}, - dictWord{142, 10, 295}, - dictWord{7, 10, 1470}, - dictWord{8, 10, 66}, - dictWord{8, 10, 137}, - dictWord{ - 8, - 10, - 761, - }, - dictWord{9, 10, 638}, - dictWord{11, 10, 80}, - dictWord{11, 10, 212}, - dictWord{11, 10, 368}, - dictWord{11, 10, 418}, - dictWord{12, 10, 8}, - dictWord{ - 13, - 10, - 15, - }, - dictWord{16, 10, 61}, - dictWord{17, 10, 59}, - dictWord{19, 10, 28}, - dictWord{148, 10, 84}, - dictWord{20, 0, 109}, - dictWord{135, 11, 1148}, - dictWord{ - 6, - 11, - 277, - }, - dictWord{7, 11, 1274}, - dictWord{7, 11, 1386}, - dictWord{7, 11, 1392}, - dictWord{12, 11, 129}, - dictWord{146, 11, 87}, - dictWord{6, 11, 187}, - dictWord{7, 11, 39}, - dictWord{7, 11, 1203}, - dictWord{8, 11, 380}, - dictWord{8, 11, 542}, - dictWord{14, 11, 117}, - dictWord{149, 11, 28}, - dictWord{134, 0, 1187}, - dictWord{5, 0, 266}, - dictWord{9, 0, 290}, - dictWord{9, 0, 364}, - dictWord{10, 0, 293}, - dictWord{11, 0, 606}, - dictWord{142, 0, 45}, - dictWord{6, 11, 297}, - dictWord{ - 7, - 11, - 793, - }, - dictWord{139, 11, 938}, - dictWord{4, 0, 50}, - dictWord{6, 0, 594}, - dictWord{9, 0, 121}, - dictWord{10, 0, 49}, - dictWord{10, 0, 412}, - dictWord{139, 0, 834}, - dictWord{136, 0, 748}, - dictWord{7, 11, 464}, - dictWord{8, 11, 438}, - dictWord{11, 11, 105}, - dictWord{11, 11, 363}, - dictWord{12, 11, 231}, - dictWord{ - 14, - 11, - 386, - }, - dictWord{15, 11, 102}, - dictWord{148, 11, 75}, - dictWord{132, 0, 466}, - dictWord{13, 0, 399}, - dictWord{14, 0, 337}, - dictWord{6, 10, 38}, - dictWord{ - 7, - 10, - 1220, - }, - dictWord{8, 10, 185}, - dictWord{8, 10, 256}, - dictWord{9, 10, 22}, - dictWord{9, 10, 331}, - dictWord{10, 10, 738}, - dictWord{11, 10, 205}, - dictWord{ - 11, - 10, - 540, - }, - dictWord{11, 10, 746}, - dictWord{13, 10, 465}, - dictWord{142, 10, 194}, - dictWord{9, 0, 378}, - dictWord{141, 0, 162}, - dictWord{137, 0, 519}, - dictWord{ - 4, - 10, - 159, - }, - dictWord{6, 10, 115}, - dictWord{7, 10, 252}, - dictWord{7, 10, 257}, - dictWord{7, 10, 1928}, - dictWord{8, 10, 69}, - dictWord{9, 10, 384}, - dictWord{ - 10, - 10, - 91, - }, - dictWord{10, 10, 615}, - dictWord{12, 10, 375}, - dictWord{14, 10, 235}, - dictWord{18, 10, 117}, - dictWord{147, 10, 123}, - dictWord{5, 11, 604}, - dictWord{ - 5, - 10, - 911, - }, - dictWord{136, 10, 278}, - dictWord{132, 0, 667}, - dictWord{8, 0, 351}, - dictWord{9, 0, 322}, - dictWord{4, 10, 151}, - dictWord{135, 10, 1567}, - dictWord{134, 0, 902}, - dictWord{133, 10, 990}, - dictWord{12, 0, 180}, - dictWord{5, 10, 194}, - dictWord{7, 10, 1662}, - dictWord{137, 10, 90}, - dictWord{4, 0, 869}, - dictWord{134, 0, 1996}, - dictWord{134, 0, 813}, - dictWord{133, 10, 425}, - dictWord{137, 11, 761}, - dictWord{132, 0, 260}, - dictWord{133, 10, 971}, - dictWord{ - 5, - 11, - 20, - }, - dictWord{6, 11, 298}, - dictWord{7, 11, 659}, - dictWord{7, 11, 1366}, - dictWord{137, 11, 219}, - dictWord{4, 0, 39}, - dictWord{5, 0, 36}, - dictWord{ - 7, - 0, - 1843, - }, - dictWord{8, 0, 407}, - dictWord{11, 0, 144}, - dictWord{140, 0, 523}, - dictWord{4, 0, 510}, 
- dictWord{10, 0, 587}, - dictWord{139, 10, 752}, - dictWord{7, 0, 29}, - dictWord{7, 0, 66}, - dictWord{7, 0, 1980}, - dictWord{10, 0, 487}, - dictWord{138, 0, 809}, - dictWord{13, 0, 260}, - dictWord{14, 0, 82}, - dictWord{18, 0, 63}, - dictWord{ - 137, - 10, - 662, - }, - dictWord{5, 10, 72}, - dictWord{6, 10, 264}, - dictWord{7, 10, 21}, - dictWord{7, 10, 46}, - dictWord{7, 10, 2013}, - dictWord{8, 10, 215}, - dictWord{ - 8, - 10, - 513, - }, - dictWord{10, 10, 266}, - dictWord{139, 10, 22}, - dictWord{134, 0, 570}, - dictWord{6, 0, 565}, - dictWord{7, 0, 1667}, - dictWord{4, 11, 439}, - dictWord{ - 10, - 10, - 95, - }, - dictWord{11, 10, 603}, - dictWord{12, 11, 242}, - dictWord{13, 10, 443}, - dictWord{14, 10, 160}, - dictWord{143, 10, 4}, - dictWord{134, 0, 1464}, - dictWord{ - 134, - 10, - 431, - }, - dictWord{9, 0, 372}, - dictWord{15, 0, 2}, - dictWord{19, 0, 10}, - dictWord{19, 0, 18}, - dictWord{5, 10, 874}, - dictWord{6, 10, 1677}, - dictWord{143, 10, 0}, - dictWord{132, 0, 787}, - dictWord{6, 0, 380}, - dictWord{12, 0, 399}, - dictWord{21, 0, 19}, - dictWord{7, 10, 939}, - dictWord{7, 10, 1172}, - dictWord{7, 10, 1671}, - dictWord{9, 10, 540}, - dictWord{10, 10, 696}, - dictWord{11, 10, 265}, - dictWord{11, 10, 732}, - dictWord{11, 10, 928}, - dictWord{11, 10, 937}, - dictWord{ - 141, - 10, - 438, - }, - dictWord{137, 0, 200}, - dictWord{132, 11, 233}, - dictWord{132, 0, 516}, - dictWord{134, 11, 577}, - dictWord{132, 0, 844}, - dictWord{11, 0, 887}, - dictWord{14, 0, 365}, - dictWord{142, 0, 375}, - dictWord{132, 11, 482}, - dictWord{8, 0, 821}, - dictWord{140, 0, 44}, - dictWord{7, 0, 1655}, - dictWord{136, 0, 305}, - dictWord{5, 10, 682}, - dictWord{135, 10, 1887}, - dictWord{135, 11, 346}, - dictWord{132, 10, 696}, - dictWord{4, 0, 10}, - dictWord{7, 0, 917}, - dictWord{139, 0, 786}, - dictWord{5, 11, 795}, - dictWord{6, 11, 1741}, - dictWord{8, 11, 417}, - dictWord{137, 11, 782}, - dictWord{4, 0, 1016}, - dictWord{134, 0, 2031}, - dictWord{5, 0, 684}, - dictWord{4, 10, 726}, - dictWord{133, 10, 630}, - dictWord{6, 0, 1021}, - dictWord{134, 0, 1480}, - dictWord{8, 10, 802}, - dictWord{136, 10, 838}, - dictWord{ - 134, - 0, - 27, - }, - dictWord{134, 0, 395}, - dictWord{135, 11, 622}, - dictWord{7, 11, 625}, - dictWord{135, 11, 1750}, - dictWord{4, 11, 203}, - dictWord{135, 11, 1936}, - dictWord{6, 10, 118}, - dictWord{7, 10, 215}, - dictWord{7, 10, 1521}, - dictWord{140, 10, 11}, - dictWord{132, 0, 813}, - dictWord{136, 0, 511}, - dictWord{7, 10, 615}, - dictWord{138, 10, 251}, - dictWord{135, 10, 1044}, - dictWord{145, 0, 56}, - dictWord{133, 10, 225}, - dictWord{6, 0, 342}, - dictWord{6, 0, 496}, - dictWord{8, 0, 275}, - dictWord{137, 0, 206}, - dictWord{4, 0, 909}, - dictWord{133, 0, 940}, - dictWord{132, 0, 891}, - dictWord{7, 11, 311}, - dictWord{9, 11, 308}, - dictWord{ - 140, - 11, - 255, - }, - dictWord{4, 10, 370}, - dictWord{5, 10, 756}, - dictWord{135, 10, 1326}, - dictWord{4, 0, 687}, - dictWord{134, 0, 1596}, - dictWord{134, 0, 1342}, - dictWord{ - 6, - 10, - 1662, - }, - dictWord{7, 10, 48}, - dictWord{8, 10, 771}, - dictWord{10, 10, 116}, - dictWord{13, 10, 104}, - dictWord{14, 10, 105}, - dictWord{14, 10, 184}, - dictWord{15, 10, 168}, - dictWord{19, 10, 92}, - dictWord{148, 10, 68}, - dictWord{138, 10, 209}, - dictWord{4, 11, 400}, - dictWord{5, 11, 267}, - dictWord{135, 11, 232}, - dictWord{151, 11, 12}, - dictWord{6, 0, 41}, - dictWord{141, 0, 160}, - dictWord{141, 11, 314}, - dictWord{134, 0, 1718}, - dictWord{136, 0, 778}, - dictWord{ - 142, - 11, - 
261, - }, - dictWord{134, 0, 1610}, - dictWord{133, 0, 115}, - dictWord{132, 0, 294}, - dictWord{14, 0, 314}, - dictWord{132, 10, 120}, - dictWord{132, 0, 983}, - dictWord{5, 0, 193}, - dictWord{140, 0, 178}, - dictWord{138, 10, 429}, - dictWord{5, 10, 820}, - dictWord{135, 10, 931}, - dictWord{6, 0, 994}, - dictWord{6, 0, 1051}, - dictWord{6, 0, 1439}, - dictWord{7, 0, 174}, - dictWord{133, 11, 732}, - dictWord{4, 11, 100}, - dictWord{7, 11, 679}, - dictWord{8, 11, 313}, - dictWord{138, 10, 199}, - dictWord{6, 10, 151}, - dictWord{6, 10, 1675}, - dictWord{7, 10, 383}, - dictWord{151, 10, 10}, - dictWord{6, 0, 1796}, - dictWord{8, 0, 848}, - dictWord{8, 0, 867}, - dictWord{ - 8, - 0, - 907, - }, - dictWord{10, 0, 855}, - dictWord{140, 0, 703}, - dictWord{140, 0, 221}, - dictWord{4, 0, 122}, - dictWord{5, 0, 796}, - dictWord{5, 0, 952}, - dictWord{6, 0, 1660}, - dictWord{6, 0, 1671}, - dictWord{8, 0, 567}, - dictWord{9, 0, 687}, - dictWord{9, 0, 742}, - dictWord{10, 0, 686}, - dictWord{11, 0, 682}, - dictWord{11, 0, 909}, - dictWord{ - 140, - 0, - 281, - }, - dictWord{5, 11, 362}, - dictWord{5, 11, 443}, - dictWord{6, 11, 318}, - dictWord{7, 11, 1019}, - dictWord{139, 11, 623}, - dictWord{5, 11, 463}, - dictWord{136, 11, 296}, - dictWord{11, 0, 583}, - dictWord{13, 0, 262}, - dictWord{6, 10, 1624}, - dictWord{12, 10, 422}, - dictWord{142, 10, 360}, - dictWord{5, 0, 179}, - dictWord{7, 0, 1095}, - dictWord{135, 0, 1213}, - dictWord{4, 10, 43}, - dictWord{4, 11, 454}, - dictWord{5, 10, 344}, - dictWord{133, 10, 357}, - dictWord{4, 0, 66}, - dictWord{7, 0, 722}, - dictWord{135, 0, 904}, - dictWord{134, 0, 773}, - dictWord{7, 0, 352}, - dictWord{133, 10, 888}, - dictWord{5, 11, 48}, - dictWord{5, 11, 404}, - dictWord{ - 6, - 11, - 557, - }, - dictWord{7, 11, 458}, - dictWord{8, 11, 597}, - dictWord{10, 11, 455}, - dictWord{10, 11, 606}, - dictWord{11, 11, 49}, - dictWord{11, 11, 548}, - dictWord{ - 12, - 11, - 476, - }, - dictWord{13, 11, 18}, - dictWord{141, 11, 450}, - dictWord{134, 11, 418}, - dictWord{132, 10, 711}, - dictWord{5, 11, 442}, - dictWord{ - 135, - 11, - 1984, - }, - dictWord{141, 0, 35}, - dictWord{137, 0, 152}, - dictWord{134, 0, 1197}, - dictWord{135, 11, 1093}, - dictWord{137, 11, 203}, - dictWord{137, 10, 440}, - dictWord{10, 0, 592}, - dictWord{10, 0, 753}, - dictWord{12, 0, 317}, - dictWord{12, 0, 355}, - dictWord{12, 0, 465}, - dictWord{12, 0, 469}, - dictWord{12, 0, 560}, - dictWord{12, 0, 578}, - dictWord{141, 0, 243}, - dictWord{133, 0, 564}, - dictWord{134, 0, 797}, - dictWord{5, 10, 958}, - dictWord{133, 10, 987}, - dictWord{5, 11, 55}, - dictWord{7, 11, 376}, - dictWord{140, 11, 161}, - dictWord{133, 11, 450}, - dictWord{134, 0, 556}, - dictWord{134, 0, 819}, - dictWord{11, 10, 276}, - dictWord{ - 142, - 10, - 293, - }, - dictWord{7, 0, 544}, - dictWord{138, 0, 61}, - dictWord{8, 0, 719}, - dictWord{4, 10, 65}, - dictWord{5, 10, 479}, - dictWord{5, 10, 1004}, - dictWord{7, 10, 1913}, - dictWord{8, 10, 317}, - dictWord{9, 10, 302}, - dictWord{10, 10, 612}, - dictWord{141, 10, 22}, - dictWord{4, 0, 5}, - dictWord{5, 0, 498}, - dictWord{8, 0, 637}, - dictWord{ - 9, - 0, - 521, - }, - dictWord{4, 11, 213}, - dictWord{4, 10, 261}, - dictWord{7, 11, 223}, - dictWord{7, 10, 510}, - dictWord{136, 11, 80}, - dictWord{5, 0, 927}, - dictWord{7, 0, 101}, - dictWord{4, 10, 291}, - dictWord{7, 11, 381}, - dictWord{7, 11, 806}, - dictWord{7, 11, 820}, - dictWord{8, 11, 354}, - dictWord{8, 11, 437}, - dictWord{8, 11, 787}, - dictWord{9, 10, 515}, - dictWord{9, 11, 657}, - 
dictWord{10, 11, 58}, - dictWord{10, 11, 339}, - dictWord{10, 11, 749}, - dictWord{11, 11, 914}, - dictWord{12, 10, 152}, - dictWord{12, 11, 162}, - dictWord{12, 10, 443}, - dictWord{13, 11, 75}, - dictWord{13, 10, 392}, - dictWord{14, 11, 106}, - dictWord{14, 11, 198}, - dictWord{ - 14, - 11, - 320, - }, - dictWord{14, 10, 357}, - dictWord{14, 11, 413}, - dictWord{146, 11, 43}, - dictWord{6, 0, 1153}, - dictWord{7, 0, 1441}, - dictWord{136, 11, 747}, - dictWord{ - 4, - 0, - 893, - }, - dictWord{5, 0, 780}, - dictWord{133, 0, 893}, - dictWord{138, 11, 654}, - dictWord{133, 11, 692}, - dictWord{133, 0, 238}, - dictWord{134, 11, 191}, - dictWord{4, 10, 130}, - dictWord{135, 10, 843}, - dictWord{6, 0, 1296}, - dictWord{5, 10, 42}, - dictWord{5, 10, 879}, - dictWord{7, 10, 245}, - dictWord{7, 10, 324}, - dictWord{ - 7, - 10, - 1532, - }, - dictWord{11, 10, 463}, - dictWord{11, 10, 472}, - dictWord{13, 10, 363}, - dictWord{144, 10, 52}, - dictWord{134, 0, 1729}, - dictWord{6, 0, 1999}, - dictWord{136, 0, 969}, - dictWord{4, 10, 134}, - dictWord{133, 10, 372}, - dictWord{4, 0, 60}, - dictWord{7, 0, 941}, - dictWord{7, 0, 1800}, - dictWord{8, 0, 314}, - dictWord{ - 9, - 0, - 700, - }, - dictWord{139, 0, 487}, - dictWord{134, 0, 1144}, - dictWord{6, 11, 162}, - dictWord{7, 11, 1960}, - dictWord{136, 11, 831}, - dictWord{132, 11, 706}, - dictWord{135, 0, 1147}, - dictWord{138, 11, 426}, - dictWord{138, 11, 89}, - dictWord{7, 0, 1853}, - dictWord{138, 0, 437}, - dictWord{136, 0, 419}, - dictWord{ - 135, - 10, - 1634, - }, - dictWord{133, 0, 828}, - dictWord{5, 0, 806}, - dictWord{7, 0, 176}, - dictWord{7, 0, 178}, - dictWord{7, 0, 1240}, - dictWord{7, 0, 1976}, - dictWord{ - 132, - 10, - 644, - }, - dictWord{135, 11, 1877}, - dictWord{5, 11, 420}, - dictWord{135, 11, 1449}, - dictWord{4, 0, 51}, - dictWord{5, 0, 39}, - dictWord{6, 0, 4}, - dictWord{7, 0, 591}, - dictWord{7, 0, 849}, - dictWord{7, 0, 951}, - dictWord{7, 0, 1613}, - dictWord{7, 0, 1760}, - dictWord{7, 0, 1988}, - dictWord{9, 0, 434}, - dictWord{10, 0, 754}, - dictWord{ - 11, - 0, - 25, - }, - dictWord{139, 0, 37}, - dictWord{10, 11, 57}, - dictWord{138, 11, 277}, - dictWord{135, 10, 540}, - dictWord{132, 11, 204}, - dictWord{135, 0, 159}, - dictWord{139, 11, 231}, - dictWord{133, 0, 902}, - dictWord{7, 0, 928}, - dictWord{7, 11, 366}, - dictWord{9, 11, 287}, - dictWord{12, 11, 199}, - dictWord{12, 11, 556}, - dictWord{140, 11, 577}, - dictWord{6, 10, 623}, - dictWord{136, 10, 789}, - dictWord{4, 10, 908}, - dictWord{5, 10, 359}, - dictWord{5, 10, 508}, - dictWord{6, 10, 1723}, - dictWord{7, 10, 343}, - dictWord{7, 10, 1996}, - dictWord{135, 10, 2026}, - dictWord{134, 0, 270}, - dictWord{4, 10, 341}, - dictWord{135, 10, 480}, - dictWord{ - 5, - 11, - 356, - }, - dictWord{135, 11, 224}, - dictWord{11, 11, 588}, - dictWord{11, 11, 864}, - dictWord{11, 11, 968}, - dictWord{143, 11, 160}, - dictWord{132, 0, 556}, - dictWord{137, 0, 801}, - dictWord{132, 0, 416}, - dictWord{142, 0, 372}, - dictWord{5, 0, 152}, - dictWord{5, 0, 197}, - dictWord{7, 0, 340}, - dictWord{7, 0, 867}, - dictWord{ - 10, - 0, - 548, - }, - dictWord{10, 0, 581}, - dictWord{11, 0, 6}, - dictWord{12, 0, 3}, - dictWord{12, 0, 19}, - dictWord{14, 0, 110}, - dictWord{142, 0, 289}, - dictWord{139, 0, 369}, - dictWord{7, 11, 630}, - dictWord{9, 11, 567}, - dictWord{11, 11, 150}, - dictWord{11, 11, 444}, - dictWord{141, 11, 119}, - dictWord{134, 11, 539}, - dictWord{ - 7, - 10, - 1995, - }, - dictWord{8, 10, 299}, - dictWord{11, 10, 890}, - dictWord{140, 10, 674}, - 
dictWord{7, 0, 34}, - dictWord{7, 0, 190}, - dictWord{8, 0, 28}, - dictWord{8, 0, 141}, - dictWord{8, 0, 444}, - dictWord{8, 0, 811}, - dictWord{9, 0, 468}, - dictWord{11, 0, 334}, - dictWord{12, 0, 24}, - dictWord{12, 0, 386}, - dictWord{140, 0, 576}, - dictWord{ - 133, - 0, - 757, - }, - dictWord{7, 0, 1553}, - dictWord{136, 0, 898}, - dictWord{133, 0, 721}, - dictWord{136, 0, 1012}, - dictWord{4, 0, 789}, - dictWord{5, 0, 647}, - dictWord{ - 135, - 0, - 1102, - }, - dictWord{132, 0, 898}, - dictWord{10, 0, 183}, - dictWord{4, 10, 238}, - dictWord{5, 10, 503}, - dictWord{6, 10, 179}, - dictWord{7, 10, 2003}, - dictWord{ - 8, - 10, - 381, - }, - dictWord{8, 10, 473}, - dictWord{9, 10, 149}, - dictWord{10, 10, 788}, - dictWord{15, 10, 45}, - dictWord{15, 10, 86}, - dictWord{20, 10, 110}, - dictWord{ - 150, - 10, - 57, - }, - dictWord{9, 0, 136}, - dictWord{19, 0, 107}, - dictWord{4, 10, 121}, - dictWord{5, 10, 156}, - dictWord{5, 10, 349}, - dictWord{10, 10, 605}, - dictWord{ - 142, - 10, - 342, - }, - dictWord{4, 11, 235}, - dictWord{135, 11, 255}, - dictWord{4, 11, 194}, - dictWord{5, 11, 584}, - dictWord{6, 11, 384}, - dictWord{7, 11, 583}, - dictWord{ - 10, - 11, - 761, - }, - dictWord{11, 11, 760}, - dictWord{139, 11, 851}, - dictWord{6, 10, 80}, - dictWord{6, 10, 1694}, - dictWord{7, 10, 173}, - dictWord{7, 10, 1974}, - dictWord{ - 9, - 10, - 547, - }, - dictWord{10, 10, 730}, - dictWord{14, 10, 18}, - dictWord{150, 10, 39}, - dictWord{4, 10, 923}, - dictWord{134, 10, 1711}, - dictWord{5, 0, 277}, - dictWord{141, 0, 247}, - dictWord{132, 0, 435}, - dictWord{133, 11, 562}, - dictWord{134, 0, 1311}, - dictWord{5, 11, 191}, - dictWord{137, 11, 271}, - dictWord{ - 132, - 10, - 595, - }, - dictWord{7, 11, 1537}, - dictWord{14, 11, 96}, - dictWord{143, 11, 73}, - dictWord{5, 0, 437}, - dictWord{7, 0, 502}, - dictWord{7, 0, 519}, - dictWord{7, 0, 1122}, - dictWord{7, 0, 1751}, - dictWord{14, 0, 211}, - dictWord{6, 10, 459}, - dictWord{7, 10, 1753}, - dictWord{7, 10, 1805}, - dictWord{8, 10, 658}, - dictWord{9, 10, 1}, - dictWord{11, 10, 959}, - dictWord{141, 10, 446}, - dictWord{6, 0, 814}, - dictWord{4, 11, 470}, - dictWord{5, 11, 473}, - dictWord{6, 11, 153}, - dictWord{7, 11, 1503}, - dictWord{7, 11, 1923}, - dictWord{10, 11, 701}, - dictWord{11, 11, 132}, - dictWord{11, 11, 168}, - dictWord{11, 11, 227}, - dictWord{11, 11, 320}, - dictWord{ - 11, - 11, - 436, - }, - dictWord{11, 11, 525}, - dictWord{11, 11, 855}, - dictWord{12, 11, 41}, - dictWord{12, 11, 286}, - dictWord{13, 11, 103}, - dictWord{13, 11, 284}, - dictWord{ - 14, - 11, - 255, - }, - dictWord{14, 11, 262}, - dictWord{15, 11, 117}, - dictWord{143, 11, 127}, - dictWord{5, 0, 265}, - dictWord{6, 0, 212}, - dictWord{135, 0, 28}, - dictWord{ - 138, - 0, - 750, - }, - dictWord{133, 11, 327}, - dictWord{6, 11, 552}, - dictWord{7, 11, 1754}, - dictWord{137, 11, 604}, - dictWord{134, 0, 2012}, - dictWord{132, 0, 702}, - dictWord{5, 11, 80}, - dictWord{6, 11, 405}, - dictWord{7, 11, 403}, - dictWord{7, 11, 1502}, - dictWord{7, 11, 1626}, - dictWord{8, 11, 456}, - dictWord{9, 11, 487}, - dictWord{9, 11, 853}, - dictWord{9, 11, 889}, - dictWord{10, 11, 309}, - dictWord{11, 11, 721}, - dictWord{11, 11, 994}, - dictWord{12, 11, 430}, - dictWord{ - 141, - 11, - 165, - }, - dictWord{5, 0, 808}, - dictWord{135, 0, 2045}, - dictWord{5, 0, 166}, - dictWord{8, 0, 739}, - dictWord{140, 0, 511}, - dictWord{134, 10, 490}, - dictWord{ - 4, - 11, - 453, - }, - dictWord{5, 11, 887}, - dictWord{6, 11, 535}, - dictWord{8, 11, 6}, - dictWord{136, 11, 
543}, - dictWord{4, 0, 119}, - dictWord{5, 0, 170}, - dictWord{5, 0, 447}, - dictWord{7, 0, 1708}, - dictWord{7, 0, 1889}, - dictWord{9, 0, 357}, - dictWord{9, 0, 719}, - dictWord{12, 0, 486}, - dictWord{140, 0, 596}, - dictWord{137, 0, 500}, - dictWord{ - 7, - 10, - 250, - }, - dictWord{136, 10, 507}, - dictWord{132, 10, 158}, - dictWord{6, 0, 809}, - dictWord{134, 0, 1500}, - dictWord{9, 0, 327}, - dictWord{11, 0, 350}, - dictWord{11, 0, 831}, - dictWord{13, 0, 352}, - dictWord{4, 10, 140}, - dictWord{7, 10, 362}, - dictWord{8, 10, 209}, - dictWord{9, 10, 10}, - dictWord{9, 10, 503}, - dictWord{ - 9, - 10, - 614, - }, - dictWord{10, 10, 689}, - dictWord{11, 10, 327}, - dictWord{11, 10, 725}, - dictWord{12, 10, 252}, - dictWord{12, 10, 583}, - dictWord{13, 10, 192}, - dictWord{14, 10, 269}, - dictWord{14, 10, 356}, - dictWord{148, 10, 50}, - dictWord{135, 11, 741}, - dictWord{4, 0, 450}, - dictWord{7, 0, 1158}, - dictWord{19, 10, 1}, - dictWord{19, 10, 26}, - dictWord{150, 10, 9}, - dictWord{6, 0, 597}, - dictWord{135, 0, 1318}, - dictWord{134, 0, 1602}, - dictWord{6, 10, 228}, - dictWord{7, 10, 1341}, - dictWord{9, 10, 408}, - dictWord{138, 10, 343}, - dictWord{7, 0, 1375}, - dictWord{7, 0, 1466}, - dictWord{138, 0, 331}, - dictWord{132, 0, 754}, - dictWord{ - 132, - 10, - 557, - }, - dictWord{5, 11, 101}, - dictWord{6, 11, 88}, - dictWord{6, 11, 543}, - dictWord{7, 11, 1677}, - dictWord{9, 11, 100}, - dictWord{10, 11, 677}, - dictWord{ - 14, - 11, - 169, - }, - dictWord{14, 11, 302}, - dictWord{14, 11, 313}, - dictWord{15, 11, 48}, - dictWord{143, 11, 84}, - dictWord{134, 0, 1368}, - dictWord{4, 11, 310}, - dictWord{ - 9, - 11, - 795, - }, - dictWord{10, 11, 733}, - dictWord{11, 11, 451}, - dictWord{12, 11, 249}, - dictWord{14, 11, 115}, - dictWord{14, 11, 286}, - dictWord{143, 11, 100}, - dictWord{132, 10, 548}, - dictWord{10, 0, 557}, - dictWord{7, 10, 197}, - dictWord{8, 10, 142}, - dictWord{8, 10, 325}, - dictWord{9, 10, 150}, - dictWord{9, 10, 596}, - dictWord{10, 10, 353}, - dictWord{11, 10, 74}, - dictWord{11, 10, 315}, - dictWord{12, 10, 662}, - dictWord{12, 10, 681}, - dictWord{14, 10, 423}, - dictWord{ - 143, - 10, - 141, - }, - dictWord{133, 11, 587}, - dictWord{5, 0, 850}, - dictWord{136, 0, 799}, - dictWord{10, 0, 908}, - dictWord{12, 0, 701}, - dictWord{12, 0, 757}, - dictWord{ - 142, - 0, - 466, - }, - dictWord{4, 0, 62}, - dictWord{5, 0, 275}, - dictWord{18, 0, 19}, - dictWord{6, 10, 399}, - dictWord{6, 10, 579}, - dictWord{7, 10, 692}, - dictWord{7, 10, 846}, - dictWord{ - 7, - 10, - 1015, - }, - dictWord{7, 10, 1799}, - dictWord{8, 10, 403}, - dictWord{9, 10, 394}, - dictWord{10, 10, 133}, - dictWord{12, 10, 4}, - dictWord{12, 10, 297}, - dictWord{12, 10, 452}, - dictWord{16, 10, 81}, - dictWord{18, 10, 25}, - dictWord{21, 10, 14}, - dictWord{22, 10, 12}, - dictWord{151, 10, 18}, - dictWord{12, 0, 459}, - dictWord{ - 7, - 10, - 1546, - }, - dictWord{11, 10, 299}, - dictWord{142, 10, 407}, - dictWord{132, 10, 177}, - dictWord{132, 11, 498}, - dictWord{7, 11, 217}, - dictWord{ - 8, - 11, - 140, - }, - dictWord{138, 11, 610}, - dictWord{5, 10, 411}, - dictWord{135, 10, 653}, - dictWord{134, 0, 1802}, - dictWord{7, 10, 439}, - dictWord{10, 10, 727}, - dictWord{11, 10, 260}, - dictWord{139, 10, 684}, - dictWord{133, 11, 905}, - dictWord{11, 11, 580}, - dictWord{142, 11, 201}, - dictWord{134, 0, 1397}, - dictWord{ - 5, - 10, - 208, - }, - dictWord{7, 10, 753}, - dictWord{135, 10, 1528}, - dictWord{7, 0, 238}, - dictWord{7, 0, 2033}, - dictWord{8, 0, 120}, - dictWord{8, 0, 
188}, - dictWord{8, 0, 659}, - dictWord{9, 0, 598}, - dictWord{10, 0, 466}, - dictWord{12, 0, 342}, - dictWord{12, 0, 588}, - dictWord{13, 0, 503}, - dictWord{14, 0, 246}, - dictWord{143, 0, 92}, - dictWord{135, 11, 1041}, - dictWord{4, 11, 456}, - dictWord{7, 11, 105}, - dictWord{7, 11, 358}, - dictWord{7, 11, 1637}, - dictWord{8, 11, 643}, - dictWord{139, 11, 483}, - dictWord{6, 0, 1318}, - dictWord{134, 0, 1324}, - dictWord{4, 0, 201}, - dictWord{7, 0, 1744}, - dictWord{8, 0, 602}, - dictWord{11, 0, 247}, - dictWord{11, 0, 826}, - dictWord{17, 0, 65}, - dictWord{133, 10, 242}, - dictWord{8, 0, 164}, - dictWord{146, 0, 62}, - dictWord{133, 10, 953}, - dictWord{139, 10, 802}, - dictWord{133, 0, 615}, - dictWord{7, 11, 1566}, - dictWord{8, 11, 269}, - dictWord{9, 11, 212}, - dictWord{9, 11, 718}, - dictWord{14, 11, 15}, - dictWord{14, 11, 132}, - dictWord{142, 11, 227}, - dictWord{133, 10, 290}, - dictWord{132, 10, 380}, - dictWord{5, 10, 52}, - dictWord{7, 10, 277}, - dictWord{9, 10, 368}, - dictWord{139, 10, 791}, - dictWord{ - 135, - 0, - 1243, - }, - dictWord{133, 11, 539}, - dictWord{11, 11, 919}, - dictWord{141, 11, 409}, - dictWord{136, 0, 968}, - dictWord{133, 11, 470}, - dictWord{134, 0, 882}, - dictWord{132, 0, 907}, - dictWord{5, 0, 100}, - dictWord{10, 0, 329}, - dictWord{12, 0, 416}, - dictWord{149, 0, 29}, - dictWord{10, 10, 138}, - dictWord{139, 10, 476}, - dictWord{5, 10, 725}, - dictWord{5, 10, 727}, - dictWord{6, 11, 91}, - dictWord{7, 11, 435}, - dictWord{135, 10, 1811}, - dictWord{4, 11, 16}, - dictWord{5, 11, 316}, - dictWord{5, 11, 842}, - dictWord{6, 11, 370}, - dictWord{6, 11, 1778}, - dictWord{8, 11, 166}, - dictWord{11, 11, 812}, - dictWord{12, 11, 206}, - dictWord{12, 11, 351}, - dictWord{14, 11, 418}, - dictWord{16, 11, 15}, - dictWord{16, 11, 34}, - dictWord{18, 11, 3}, - dictWord{19, 11, 3}, - dictWord{19, 11, 7}, - dictWord{20, 11, 4}, - dictWord{ - 149, - 11, - 21, - }, - dictWord{132, 0, 176}, - dictWord{5, 0, 636}, - dictWord{5, 0, 998}, - dictWord{7, 0, 9}, - dictWord{7, 0, 1508}, - dictWord{8, 0, 26}, - dictWord{9, 0, 317}, - dictWord{ - 9, - 0, - 358, - }, - dictWord{10, 0, 210}, - dictWord{10, 0, 292}, - dictWord{10, 0, 533}, - dictWord{11, 0, 555}, - dictWord{12, 0, 526}, - dictWord{12, 0, 607}, - dictWord{ - 13, - 0, - 263, - }, - dictWord{13, 0, 459}, - dictWord{142, 0, 271}, - dictWord{6, 0, 256}, - dictWord{8, 0, 265}, - dictWord{4, 10, 38}, - dictWord{7, 10, 307}, - dictWord{7, 10, 999}, - dictWord{7, 10, 1481}, - dictWord{7, 10, 1732}, - dictWord{7, 10, 1738}, - dictWord{9, 10, 414}, - dictWord{11, 10, 316}, - dictWord{12, 10, 52}, - dictWord{13, 10, 420}, - dictWord{147, 10, 100}, - dictWord{135, 10, 1296}, - dictWord{4, 11, 611}, - dictWord{133, 11, 606}, - dictWord{4, 0, 643}, - dictWord{142, 11, 21}, - dictWord{ - 133, - 11, - 715, - }, - dictWord{133, 10, 723}, - dictWord{6, 0, 610}, - dictWord{135, 11, 597}, - dictWord{10, 0, 127}, - dictWord{141, 0, 27}, - dictWord{6, 0, 1995}, - dictWord{ - 6, - 0, - 2001, - }, - dictWord{8, 0, 119}, - dictWord{136, 0, 973}, - dictWord{4, 11, 149}, - dictWord{138, 11, 368}, - dictWord{12, 0, 522}, - dictWord{4, 11, 154}, - dictWord{ - 5, - 10, - 109, - }, - dictWord{6, 10, 1784}, - dictWord{7, 11, 1134}, - dictWord{7, 10, 1895}, - dictWord{8, 11, 105}, - dictWord{12, 10, 296}, - dictWord{140, 10, 302}, - dictWord{4, 11, 31}, - dictWord{6, 11, 429}, - dictWord{7, 11, 962}, - dictWord{9, 11, 458}, - dictWord{139, 11, 691}, - dictWord{10, 0, 553}, - dictWord{11, 0, 876}, - dictWord{13, 0, 193}, - 
[diff hunk elided: thousands of auto-generated `dictWord{…}` static-dictionary table entries deleted from a vendored dependency; the flattened removal lines are not reproduced here]
1188}, - dictWord{7, 11, 1710}, - dictWord{8, 11, 286}, - dictWord{9, 11, 667}, - dictWord{11, 11, 592}, - dictWord{ - 139, - 11, - 730, - }, - dictWord{136, 10, 617}, - dictWord{135, 0, 1120}, - dictWord{135, 11, 1146}, - dictWord{139, 10, 563}, - dictWord{4, 11, 352}, - dictWord{4, 10, 369}, - dictWord{135, 11, 687}, - dictWord{143, 11, 38}, - dictWord{4, 0, 399}, - dictWord{5, 0, 119}, - dictWord{5, 0, 494}, - dictWord{7, 0, 751}, - dictWord{9, 0, 556}, - dictWord{ - 14, - 11, - 179, - }, - dictWord{15, 11, 151}, - dictWord{150, 11, 11}, - dictWord{4, 11, 192}, - dictWord{5, 11, 49}, - dictWord{6, 11, 200}, - dictWord{6, 11, 293}, - dictWord{ - 6, - 11, - 1696, - }, - dictWord{135, 11, 488}, - dictWord{4, 0, 398}, - dictWord{133, 0, 660}, - dictWord{7, 0, 1030}, - dictWord{134, 10, 622}, - dictWord{135, 11, 595}, - dictWord{141, 0, 168}, - dictWord{132, 11, 147}, - dictWord{7, 0, 973}, - dictWord{10, 10, 624}, - dictWord{142, 10, 279}, - dictWord{132, 10, 363}, - dictWord{ - 132, - 0, - 642, - }, - dictWord{133, 11, 934}, - dictWord{134, 0, 1615}, - dictWord{7, 11, 505}, - dictWord{135, 11, 523}, - dictWord{7, 0, 594}, - dictWord{7, 0, 851}, - dictWord{ - 7, - 0, - 1858, - }, - dictWord{9, 0, 411}, - dictWord{9, 0, 574}, - dictWord{9, 0, 666}, - dictWord{9, 0, 737}, - dictWord{10, 0, 346}, - dictWord{10, 0, 712}, - dictWord{11, 0, 246}, - dictWord{11, 0, 432}, - dictWord{11, 0, 517}, - dictWord{11, 0, 647}, - dictWord{11, 0, 679}, - dictWord{11, 0, 727}, - dictWord{12, 0, 304}, - dictWord{12, 0, 305}, - dictWord{ - 12, - 0, - 323, - }, - dictWord{12, 0, 483}, - dictWord{12, 0, 572}, - dictWord{12, 0, 593}, - dictWord{12, 0, 602}, - dictWord{13, 0, 95}, - dictWord{13, 0, 101}, - dictWord{ - 13, - 0, - 171, - }, - dictWord{13, 0, 315}, - dictWord{13, 0, 378}, - dictWord{13, 0, 425}, - dictWord{13, 0, 475}, - dictWord{14, 0, 63}, - dictWord{14, 0, 380}, - dictWord{14, 0, 384}, - dictWord{15, 0, 133}, - dictWord{18, 0, 112}, - dictWord{148, 0, 72}, - dictWord{135, 0, 1093}, - dictWord{132, 0, 679}, - dictWord{8, 0, 913}, - dictWord{10, 0, 903}, - dictWord{10, 0, 915}, - dictWord{12, 0, 648}, - dictWord{12, 0, 649}, - dictWord{14, 0, 455}, - dictWord{16, 0, 112}, - dictWord{138, 11, 438}, - dictWord{137, 0, 203}, - dictWord{134, 10, 292}, - dictWord{134, 0, 1492}, - dictWord{7, 0, 1374}, - dictWord{8, 0, 540}, - dictWord{5, 10, 177}, - dictWord{6, 10, 616}, - dictWord{7, 10, 827}, - dictWord{9, 10, 525}, - dictWord{138, 10, 656}, - dictWord{135, 0, 1486}, - dictWord{9, 0, 714}, - dictWord{138, 10, 31}, - dictWord{136, 0, 825}, - dictWord{ - 134, - 0, - 1511, - }, - dictWord{132, 11, 637}, - dictWord{134, 0, 952}, - dictWord{4, 10, 161}, - dictWord{133, 10, 631}, - dictWord{5, 0, 143}, - dictWord{5, 0, 769}, - dictWord{ - 6, - 0, - 1760, - }, - dictWord{7, 0, 682}, - dictWord{7, 0, 1992}, - dictWord{136, 0, 736}, - dictWord{132, 0, 700}, - dictWord{134, 0, 1540}, - dictWord{132, 11, 777}, - dictWord{ - 9, - 11, - 867, - }, - dictWord{138, 11, 837}, - dictWord{7, 0, 1557}, - dictWord{135, 10, 1684}, - dictWord{133, 0, 860}, - dictWord{6, 0, 422}, - dictWord{7, 0, 0}, - dictWord{ - 7, - 0, - 1544, - }, - dictWord{9, 0, 605}, - dictWord{11, 0, 990}, - dictWord{12, 0, 235}, - dictWord{12, 0, 453}, - dictWord{13, 0, 47}, - dictWord{13, 0, 266}, - dictWord{9, 10, 469}, - dictWord{9, 10, 709}, - dictWord{12, 10, 512}, - dictWord{14, 10, 65}, - dictWord{145, 10, 12}, - dictWord{11, 0, 807}, - dictWord{10, 10, 229}, - dictWord{11, 10, 73}, - dictWord{139, 10, 376}, - dictWord{6, 11, 170}, - 
dictWord{7, 11, 1080}, - dictWord{8, 11, 395}, - dictWord{8, 11, 487}, - dictWord{11, 11, 125}, - dictWord{ - 141, - 11, - 147, - }, - dictWord{5, 0, 515}, - dictWord{137, 0, 131}, - dictWord{7, 0, 1605}, - dictWord{11, 0, 962}, - dictWord{146, 0, 139}, - dictWord{132, 0, 646}, - dictWord{ - 4, - 0, - 396, - }, - dictWord{7, 0, 728}, - dictWord{9, 0, 117}, - dictWord{13, 0, 202}, - dictWord{148, 0, 51}, - dictWord{6, 0, 121}, - dictWord{6, 0, 124}, - dictWord{6, 0, 357}, - dictWord{ - 7, - 0, - 1138, - }, - dictWord{7, 0, 1295}, - dictWord{8, 0, 162}, - dictWord{8, 0, 508}, - dictWord{11, 0, 655}, - dictWord{4, 11, 535}, - dictWord{6, 10, 558}, - dictWord{ - 7, - 10, - 651, - }, - dictWord{8, 11, 618}, - dictWord{9, 10, 0}, - dictWord{10, 10, 34}, - dictWord{139, 10, 1008}, - dictWord{135, 11, 1245}, - dictWord{138, 0, 357}, - dictWord{ - 150, - 11, - 23, - }, - dictWord{133, 0, 237}, - dictWord{135, 0, 1784}, - dictWord{7, 10, 1832}, - dictWord{138, 10, 374}, - dictWord{132, 0, 713}, - dictWord{132, 11, 46}, - dictWord{6, 0, 1536}, - dictWord{10, 0, 348}, - dictWord{5, 11, 811}, - dictWord{6, 11, 1679}, - dictWord{6, 11, 1714}, - dictWord{135, 11, 2032}, - dictWord{ - 11, - 11, - 182, - }, - dictWord{142, 11, 195}, - dictWord{6, 0, 523}, - dictWord{7, 0, 738}, - dictWord{7, 10, 771}, - dictWord{7, 10, 1731}, - dictWord{9, 10, 405}, - dictWord{ - 138, - 10, - 421, - }, - dictWord{7, 11, 1458}, - dictWord{9, 11, 407}, - dictWord{139, 11, 15}, - dictWord{6, 11, 34}, - dictWord{7, 11, 69}, - dictWord{7, 11, 640}, - dictWord{ - 7, - 11, - 1089, - }, - dictWord{8, 11, 708}, - dictWord{8, 11, 721}, - dictWord{9, 11, 363}, - dictWord{9, 11, 643}, - dictWord{10, 11, 628}, - dictWord{148, 11, 98}, - dictWord{ - 133, - 0, - 434, - }, - dictWord{135, 0, 1877}, - dictWord{7, 0, 571}, - dictWord{138, 0, 366}, - dictWord{5, 10, 881}, - dictWord{133, 10, 885}, - dictWord{9, 0, 513}, - dictWord{ - 10, - 0, - 25, - }, - dictWord{10, 0, 39}, - dictWord{12, 0, 122}, - dictWord{140, 0, 187}, - dictWord{132, 0, 580}, - dictWord{5, 10, 142}, - dictWord{134, 10, 546}, - dictWord{ - 132, - 11, - 462, - }, - dictWord{137, 0, 873}, - dictWord{5, 10, 466}, - dictWord{11, 10, 571}, - dictWord{12, 10, 198}, - dictWord{13, 10, 283}, - dictWord{14, 10, 186}, - dictWord{15, 10, 21}, - dictWord{143, 10, 103}, - dictWord{7, 0, 171}, - dictWord{4, 10, 185}, - dictWord{5, 10, 257}, - dictWord{5, 10, 839}, - dictWord{5, 10, 936}, - dictWord{ - 9, - 10, - 399, - }, - dictWord{10, 10, 258}, - dictWord{10, 10, 395}, - dictWord{10, 10, 734}, - dictWord{11, 10, 1014}, - dictWord{12, 10, 23}, - dictWord{13, 10, 350}, - dictWord{14, 10, 150}, - dictWord{147, 10, 6}, - dictWord{134, 0, 625}, - dictWord{7, 0, 107}, - dictWord{7, 0, 838}, - dictWord{8, 0, 550}, - dictWord{138, 0, 401}, - dictWord{ - 5, - 11, - 73, - }, - dictWord{6, 11, 23}, - dictWord{134, 11, 338}, - dictWord{4, 0, 943}, - dictWord{6, 0, 1850}, - dictWord{12, 0, 713}, - dictWord{142, 0, 434}, - dictWord{ - 11, - 0, - 588, - }, - dictWord{11, 0, 864}, - dictWord{11, 0, 936}, - dictWord{11, 0, 968}, - dictWord{12, 0, 73}, - dictWord{12, 0, 343}, - dictWord{12, 0, 394}, - dictWord{13, 0, 275}, - dictWord{14, 0, 257}, - dictWord{15, 0, 160}, - dictWord{7, 10, 404}, - dictWord{7, 10, 1377}, - dictWord{7, 10, 1430}, - dictWord{7, 10, 2017}, - dictWord{8, 10, 149}, - dictWord{8, 10, 239}, - dictWord{8, 10, 512}, - dictWord{8, 10, 793}, - dictWord{8, 10, 818}, - dictWord{9, 10, 474}, - dictWord{9, 10, 595}, - dictWord{10, 10, 122}, - dictWord{10, 10, 565}, - dictWord{10, 10, 
649}, - dictWord{10, 10, 783}, - dictWord{11, 10, 239}, - dictWord{11, 10, 295}, - dictWord{11, 10, 447}, - dictWord{ - 11, - 10, - 528, - }, - dictWord{11, 10, 639}, - dictWord{11, 10, 800}, - dictWord{12, 10, 25}, - dictWord{12, 10, 157}, - dictWord{12, 10, 316}, - dictWord{12, 10, 390}, - dictWord{ - 12, - 10, - 391, - }, - dictWord{12, 10, 395}, - dictWord{12, 10, 478}, - dictWord{12, 10, 503}, - dictWord{12, 10, 592}, - dictWord{12, 10, 680}, - dictWord{13, 10, 50}, - dictWord{13, 10, 53}, - dictWord{13, 10, 132}, - dictWord{13, 10, 198}, - dictWord{13, 10, 322}, - dictWord{13, 10, 415}, - dictWord{13, 10, 511}, - dictWord{14, 10, 71}, - dictWord{14, 10, 395}, - dictWord{15, 10, 71}, - dictWord{15, 10, 136}, - dictWord{17, 10, 123}, - dictWord{18, 10, 93}, - dictWord{147, 10, 58}, - dictWord{ - 133, - 0, - 768, - }, - dictWord{11, 0, 103}, - dictWord{142, 0, 0}, - dictWord{136, 10, 712}, - dictWord{132, 0, 799}, - dictWord{132, 0, 894}, - dictWord{7, 11, 725}, - dictWord{ - 8, - 11, - 498, - }, - dictWord{139, 11, 268}, - dictWord{135, 11, 1798}, - dictWord{135, 11, 773}, - dictWord{141, 11, 360}, - dictWord{4, 10, 377}, - dictWord{152, 10, 13}, - dictWord{135, 0, 1673}, - dictWord{132, 11, 583}, - dictWord{134, 0, 1052}, - dictWord{133, 11, 220}, - dictWord{140, 11, 69}, - dictWord{132, 11, 544}, - dictWord{ - 4, - 10, - 180, - }, - dictWord{135, 10, 1906}, - dictWord{134, 0, 272}, - dictWord{4, 0, 441}, - dictWord{134, 0, 1421}, - dictWord{4, 0, 9}, - dictWord{5, 0, 128}, - dictWord{ - 7, - 0, - 368, - }, - dictWord{11, 0, 480}, - dictWord{148, 0, 3}, - dictWord{5, 11, 176}, - dictWord{6, 11, 437}, - dictWord{6, 11, 564}, - dictWord{11, 11, 181}, - dictWord{ - 141, - 11, - 183, - }, - dictWord{132, 10, 491}, - dictWord{7, 0, 1182}, - dictWord{141, 11, 67}, - dictWord{6, 0, 1346}, - dictWord{4, 10, 171}, - dictWord{138, 10, 234}, - dictWord{ - 4, - 10, - 586, - }, - dictWord{7, 10, 1186}, - dictWord{138, 10, 631}, - dictWord{136, 0, 682}, - dictWord{134, 0, 1004}, - dictWord{15, 0, 24}, - dictWord{143, 11, 24}, - dictWord{134, 0, 968}, - dictWord{4, 0, 2}, - dictWord{6, 0, 742}, - dictWord{6, 0, 793}, - dictWord{7, 0, 545}, - dictWord{7, 0, 894}, - dictWord{9, 10, 931}, - dictWord{ - 10, - 10, - 334, - }, - dictWord{148, 10, 71}, - dictWord{136, 11, 600}, - dictWord{133, 10, 765}, - dictWord{9, 0, 769}, - dictWord{140, 0, 185}, - dictWord{4, 11, 790}, - dictWord{ - 5, - 11, - 273, - }, - dictWord{134, 11, 394}, - dictWord{7, 0, 474}, - dictWord{137, 0, 578}, - dictWord{4, 11, 135}, - dictWord{6, 11, 127}, - dictWord{7, 11, 1185}, - dictWord{ - 7, - 11, - 1511, - }, - dictWord{8, 11, 613}, - dictWord{11, 11, 5}, - dictWord{12, 11, 133}, - dictWord{12, 11, 495}, - dictWord{12, 11, 586}, - dictWord{14, 11, 385}, - dictWord{15, 11, 118}, - dictWord{17, 11, 20}, - dictWord{146, 11, 98}, - dictWord{133, 10, 424}, - dictWord{5, 0, 530}, - dictWord{142, 0, 113}, - dictWord{6, 11, 230}, - dictWord{7, 11, 961}, - dictWord{7, 11, 1085}, - dictWord{136, 11, 462}, - dictWord{7, 11, 1954}, - dictWord{137, 11, 636}, - dictWord{136, 10, 714}, - dictWord{ - 149, - 11, - 6, - }, - dictWord{135, 10, 685}, - dictWord{9, 10, 420}, - dictWord{10, 10, 269}, - dictWord{10, 10, 285}, - dictWord{10, 10, 576}, - dictWord{11, 10, 397}, - dictWord{13, 10, 175}, - dictWord{145, 10, 90}, - dictWord{132, 10, 429}, - dictWord{5, 0, 556}, - dictWord{5, 11, 162}, - dictWord{136, 11, 68}, - dictWord{132, 11, 654}, - dictWord{4, 11, 156}, - dictWord{7, 11, 998}, - dictWord{7, 11, 1045}, - dictWord{7, 11, 1860}, - 
dictWord{9, 11, 48}, - dictWord{9, 11, 692}, - dictWord{11, 11, 419}, - dictWord{139, 11, 602}, - dictWord{6, 0, 1317}, - dictWord{8, 0, 16}, - dictWord{9, 0, 825}, - dictWord{12, 0, 568}, - dictWord{7, 11, 1276}, - dictWord{8, 11, 474}, - dictWord{137, 11, 652}, - dictWord{18, 0, 97}, - dictWord{7, 10, 18}, - dictWord{7, 10, 699}, - dictWord{7, 10, 1966}, - dictWord{8, 10, 752}, - dictWord{9, 10, 273}, - dictWord{ - 9, - 10, - 412, - }, - dictWord{9, 10, 703}, - dictWord{10, 10, 71}, - dictWord{10, 10, 427}, - dictWord{138, 10, 508}, - dictWord{10, 0, 703}, - dictWord{7, 11, 1454}, - dictWord{138, 11, 703}, - dictWord{4, 10, 53}, - dictWord{5, 10, 186}, - dictWord{135, 10, 752}, - dictWord{134, 0, 892}, - dictWord{134, 0, 1571}, - dictWord{8, 10, 575}, - dictWord{10, 10, 289}, - dictWord{139, 10, 319}, - dictWord{6, 0, 186}, - dictWord{137, 0, 426}, - dictWord{134, 0, 1101}, - dictWord{132, 10, 675}, - dictWord{ - 132, - 0, - 585, - }, - dictWord{6, 0, 1870}, - dictWord{137, 0, 937}, - dictWord{152, 11, 10}, - dictWord{9, 11, 197}, - dictWord{10, 11, 300}, - dictWord{12, 11, 473}, - dictWord{ - 13, - 11, - 90, - }, - dictWord{141, 11, 405}, - dictWord{4, 0, 93}, - dictWord{5, 0, 252}, - dictWord{6, 0, 229}, - dictWord{7, 0, 291}, - dictWord{9, 0, 550}, - dictWord{139, 0, 644}, - dictWord{137, 0, 749}, - dictWord{9, 0, 162}, - dictWord{6, 10, 209}, - dictWord{8, 10, 468}, - dictWord{9, 10, 210}, - dictWord{11, 10, 36}, - dictWord{12, 10, 28}, - dictWord{12, 10, 630}, - dictWord{13, 10, 21}, - dictWord{13, 10, 349}, - dictWord{14, 10, 7}, - dictWord{145, 10, 13}, - dictWord{132, 0, 381}, - dictWord{132, 11, 606}, - dictWord{4, 10, 342}, - dictWord{135, 10, 1179}, - dictWord{7, 11, 1587}, - dictWord{7, 11, 1707}, - dictWord{10, 11, 528}, - dictWord{139, 11, 504}, - dictWord{ - 12, - 11, - 39, - }, - dictWord{13, 11, 265}, - dictWord{141, 11, 439}, - dictWord{4, 10, 928}, - dictWord{133, 10, 910}, - dictWord{7, 10, 1838}, - dictWord{7, 11, 1978}, - dictWord{136, 11, 676}, - dictWord{6, 0, 762}, - dictWord{6, 0, 796}, - dictWord{134, 0, 956}, - dictWord{4, 10, 318}, - dictWord{4, 10, 496}, - dictWord{7, 10, 856}, - dictWord{139, 10, 654}, - dictWord{137, 11, 242}, - dictWord{4, 11, 361}, - dictWord{133, 11, 315}, - dictWord{132, 11, 461}, - dictWord{132, 11, 472}, - dictWord{ - 132, - 0, - 857, - }, - dictWord{5, 0, 21}, - dictWord{6, 0, 77}, - dictWord{6, 0, 157}, - dictWord{7, 0, 974}, - dictWord{7, 0, 1301}, - dictWord{7, 0, 1339}, - dictWord{7, 0, 1490}, - dictWord{ - 7, - 0, - 1873, - }, - dictWord{9, 0, 628}, - dictWord{7, 10, 915}, - dictWord{8, 10, 247}, - dictWord{147, 10, 0}, - dictWord{4, 10, 202}, - dictWord{5, 10, 382}, - dictWord{ - 6, - 10, - 454, - }, - dictWord{7, 10, 936}, - dictWord{7, 10, 1803}, - dictWord{8, 10, 758}, - dictWord{9, 10, 375}, - dictWord{9, 10, 895}, - dictWord{10, 10, 743}, - dictWord{ - 10, - 10, - 792, - }, - dictWord{11, 10, 978}, - dictWord{11, 10, 1012}, - dictWord{142, 10, 109}, - dictWord{7, 11, 617}, - dictWord{10, 11, 498}, - dictWord{11, 11, 501}, - dictWord{12, 11, 16}, - dictWord{140, 11, 150}, - dictWord{7, 10, 1150}, - dictWord{7, 10, 1425}, - dictWord{7, 10, 1453}, - dictWord{10, 11, 747}, - dictWord{ - 140, - 10, - 513, - }, - dictWord{133, 11, 155}, - dictWord{11, 0, 919}, - dictWord{141, 0, 409}, - dictWord{138, 10, 791}, - dictWord{10, 0, 633}, - dictWord{139, 11, 729}, - dictWord{ - 7, - 11, - 163, - }, - dictWord{8, 11, 319}, - dictWord{9, 11, 402}, - dictWord{10, 11, 24}, - dictWord{10, 11, 681}, - dictWord{11, 11, 200}, - dictWord{11, 
11, 567}, - dictWord{12, 11, 253}, - dictWord{12, 11, 410}, - dictWord{142, 11, 219}, - dictWord{5, 11, 475}, - dictWord{7, 11, 1780}, - dictWord{9, 11, 230}, - dictWord{11, 11, 297}, - dictWord{11, 11, 558}, - dictWord{14, 11, 322}, - dictWord{147, 11, 76}, - dictWord{7, 0, 332}, - dictWord{6, 10, 445}, - dictWord{137, 10, 909}, - dictWord{ - 135, - 11, - 1956, - }, - dictWord{136, 11, 274}, - dictWord{134, 10, 578}, - dictWord{135, 0, 1489}, - dictWord{135, 11, 1848}, - dictWord{5, 11, 944}, - dictWord{ - 134, - 11, - 1769, - }, - dictWord{132, 11, 144}, - dictWord{136, 10, 766}, - dictWord{4, 0, 832}, - dictWord{135, 10, 541}, - dictWord{8, 0, 398}, - dictWord{9, 0, 681}, - dictWord{ - 139, - 0, - 632, - }, - dictWord{136, 0, 645}, - dictWord{9, 0, 791}, - dictWord{10, 0, 93}, - dictWord{16, 0, 13}, - dictWord{17, 0, 23}, - dictWord{18, 0, 135}, - dictWord{19, 0, 12}, - dictWord{20, 0, 1}, - dictWord{20, 0, 12}, - dictWord{148, 0, 14}, - dictWord{6, 11, 247}, - dictWord{137, 11, 555}, - dictWord{134, 0, 20}, - dictWord{132, 0, 800}, - dictWord{135, 0, 1841}, - dictWord{139, 10, 983}, - dictWord{137, 10, 768}, - dictWord{132, 10, 584}, - dictWord{141, 11, 51}, - dictWord{6, 0, 1993}, - dictWord{ - 4, - 11, - 620, - }, - dictWord{138, 11, 280}, - dictWord{136, 0, 769}, - dictWord{11, 0, 290}, - dictWord{11, 0, 665}, - dictWord{7, 11, 1810}, - dictWord{11, 11, 866}, - dictWord{ - 12, - 11, - 103, - }, - dictWord{13, 11, 495}, - dictWord{17, 11, 67}, - dictWord{147, 11, 74}, - dictWord{134, 0, 1426}, - dictWord{139, 0, 60}, - dictWord{4, 10, 326}, - dictWord{135, 10, 1770}, - dictWord{7, 0, 1874}, - dictWord{9, 0, 641}, - dictWord{132, 10, 226}, - dictWord{6, 0, 644}, - dictWord{5, 10, 426}, - dictWord{8, 10, 30}, - dictWord{ - 9, - 10, - 2, - }, - dictWord{11, 10, 549}, - dictWord{147, 10, 122}, - dictWord{5, 11, 428}, - dictWord{138, 11, 442}, - dictWord{135, 11, 1871}, - dictWord{ - 135, - 0, - 1757, - }, - dictWord{147, 10, 117}, - dictWord{135, 0, 937}, - dictWord{135, 0, 1652}, - dictWord{6, 0, 654}, - dictWord{134, 0, 1476}, - dictWord{133, 11, 99}, - dictWord{135, 0, 527}, - dictWord{132, 10, 345}, - dictWord{4, 10, 385}, - dictWord{4, 11, 397}, - dictWord{7, 10, 265}, - dictWord{135, 10, 587}, - dictWord{4, 0, 579}, - dictWord{5, 0, 226}, - dictWord{5, 0, 323}, - dictWord{135, 0, 960}, - dictWord{134, 0, 1486}, - dictWord{8, 11, 502}, - dictWord{144, 11, 9}, - dictWord{4, 10, 347}, - dictWord{ - 5, - 10, - 423, - }, - dictWord{5, 10, 996}, - dictWord{135, 10, 1329}, - dictWord{7, 11, 727}, - dictWord{146, 11, 73}, - dictWord{4, 11, 485}, - dictWord{7, 11, 353}, - dictWord{7, 10, 1259}, - dictWord{7, 11, 1523}, - dictWord{9, 10, 125}, - dictWord{139, 10, 65}, - dictWord{6, 0, 325}, - dictWord{5, 10, 136}, - dictWord{6, 11, 366}, - dictWord{ - 7, - 11, - 1384, - }, - dictWord{7, 11, 1601}, - dictWord{136, 10, 644}, - dictWord{138, 11, 160}, - dictWord{6, 0, 1345}, - dictWord{137, 11, 282}, - dictWord{18, 0, 91}, - dictWord{147, 0, 70}, - dictWord{136, 0, 404}, - dictWord{4, 11, 157}, - dictWord{133, 11, 471}, - dictWord{133, 0, 973}, - dictWord{6, 0, 135}, - dictWord{ - 135, - 0, - 1176, - }, - dictWord{8, 11, 116}, - dictWord{11, 11, 551}, - dictWord{142, 11, 159}, - dictWord{4, 0, 549}, - dictWord{4, 10, 433}, - dictWord{133, 10, 719}, - dictWord{ - 136, - 0, - 976, - }, - dictWord{5, 11, 160}, - dictWord{7, 11, 363}, - dictWord{7, 11, 589}, - dictWord{10, 11, 170}, - dictWord{141, 11, 55}, - dictWord{144, 0, 21}, - dictWord{ - 144, - 0, - 51, - }, - dictWord{135, 0, 314}, - 
dictWord{135, 10, 1363}, - dictWord{4, 11, 108}, - dictWord{7, 11, 405}, - dictWord{10, 11, 491}, - dictWord{139, 11, 498}, - dictWord{146, 0, 4}, - dictWord{4, 10, 555}, - dictWord{8, 10, 536}, - dictWord{10, 10, 288}, - dictWord{139, 10, 1005}, - dictWord{135, 11, 1005}, - dictWord{6, 0, 281}, - dictWord{7, 0, 6}, - dictWord{8, 0, 282}, - dictWord{8, 0, 480}, - dictWord{8, 0, 499}, - dictWord{9, 0, 198}, - dictWord{10, 0, 143}, - dictWord{10, 0, 169}, - dictWord{ - 10, - 0, - 211, - }, - dictWord{10, 0, 417}, - dictWord{10, 0, 574}, - dictWord{11, 0, 147}, - dictWord{11, 0, 395}, - dictWord{12, 0, 75}, - dictWord{12, 0, 407}, - dictWord{12, 0, 608}, - dictWord{13, 0, 500}, - dictWord{142, 0, 251}, - dictWord{6, 0, 1093}, - dictWord{6, 0, 1405}, - dictWord{9, 10, 370}, - dictWord{138, 10, 90}, - dictWord{4, 11, 926}, - dictWord{133, 11, 983}, - dictWord{135, 0, 1776}, - dictWord{134, 0, 1528}, - dictWord{132, 0, 419}, - dictWord{132, 11, 538}, - dictWord{6, 11, 294}, - dictWord{ - 7, - 11, - 1267, - }, - dictWord{136, 11, 624}, - dictWord{135, 11, 1772}, - dictWord{138, 11, 301}, - dictWord{4, 10, 257}, - dictWord{135, 10, 2031}, - dictWord{4, 0, 138}, - dictWord{7, 0, 1012}, - dictWord{7, 0, 1280}, - dictWord{9, 0, 76}, - dictWord{135, 10, 1768}, - dictWord{132, 11, 757}, - dictWord{5, 0, 29}, - dictWord{140, 0, 638}, - dictWord{7, 11, 655}, - dictWord{135, 11, 1844}, - dictWord{7, 0, 1418}, - dictWord{6, 11, 257}, - dictWord{135, 11, 1522}, - dictWord{8, 11, 469}, - dictWord{ - 138, - 11, - 47, - }, - dictWord{142, 11, 278}, - dictWord{6, 10, 83}, - dictWord{6, 10, 1733}, - dictWord{135, 10, 1389}, - dictWord{11, 11, 204}, - dictWord{11, 11, 243}, - dictWord{140, 11, 293}, - dictWord{135, 11, 1875}, - dictWord{6, 0, 1710}, - dictWord{135, 0, 2038}, - dictWord{137, 11, 299}, - dictWord{4, 0, 17}, - dictWord{5, 0, 23}, - dictWord{7, 0, 995}, - dictWord{11, 0, 383}, - dictWord{11, 0, 437}, - dictWord{12, 0, 460}, - dictWord{140, 0, 532}, - dictWord{133, 0, 862}, - dictWord{137, 10, 696}, - dictWord{6, 0, 592}, - dictWord{138, 0, 946}, - dictWord{138, 11, 599}, - dictWord{7, 10, 1718}, - dictWord{9, 10, 95}, - dictWord{9, 10, 274}, - dictWord{10, 10, 279}, - dictWord{10, 10, 317}, - dictWord{10, 10, 420}, - dictWord{11, 10, 303}, - dictWord{11, 10, 808}, - dictWord{12, 10, 134}, - dictWord{12, 10, 367}, - dictWord{ - 13, - 10, - 149, - }, - dictWord{13, 10, 347}, - dictWord{14, 10, 349}, - dictWord{14, 10, 406}, - dictWord{18, 10, 22}, - dictWord{18, 10, 89}, - dictWord{18, 10, 122}, - dictWord{ - 147, - 10, - 47, - }, - dictWord{8, 0, 70}, - dictWord{12, 0, 171}, - dictWord{141, 0, 272}, - dictWord{133, 10, 26}, - dictWord{132, 10, 550}, - dictWord{137, 0, 812}, - dictWord{ - 10, - 0, - 233, - }, - dictWord{139, 0, 76}, - dictWord{134, 0, 988}, - dictWord{134, 0, 442}, - dictWord{136, 10, 822}, - dictWord{7, 0, 896}, - dictWord{4, 10, 902}, - dictWord{ - 5, - 10, - 809, - }, - dictWord{134, 10, 122}, - dictWord{5, 11, 150}, - dictWord{7, 11, 106}, - dictWord{8, 11, 603}, - dictWord{9, 11, 593}, - dictWord{9, 11, 634}, - dictWord{ - 10, - 11, - 44, - }, - dictWord{10, 11, 173}, - dictWord{11, 11, 462}, - dictWord{11, 11, 515}, - dictWord{13, 11, 216}, - dictWord{13, 11, 288}, - dictWord{142, 11, 400}, - dictWord{136, 0, 483}, - dictWord{135, 10, 262}, - dictWord{6, 0, 1709}, - dictWord{133, 10, 620}, - dictWord{4, 10, 34}, - dictWord{5, 10, 574}, - dictWord{7, 10, 279}, - dictWord{7, 10, 1624}, - dictWord{136, 10, 601}, - dictWord{137, 10, 170}, - dictWord{147, 0, 119}, - dictWord{12, 11, 
108}, - dictWord{141, 11, 291}, - dictWord{ - 11, - 0, - 69, - }, - dictWord{12, 0, 105}, - dictWord{12, 0, 117}, - dictWord{13, 0, 213}, - dictWord{14, 0, 13}, - dictWord{14, 0, 62}, - dictWord{14, 0, 177}, - dictWord{14, 0, 421}, - dictWord{15, 0, 19}, - dictWord{146, 0, 141}, - dictWord{137, 0, 309}, - dictWord{11, 11, 278}, - dictWord{142, 11, 73}, - dictWord{7, 0, 608}, - dictWord{7, 0, 976}, - dictWord{9, 0, 146}, - dictWord{10, 0, 206}, - dictWord{10, 0, 596}, - dictWord{13, 0, 218}, - dictWord{142, 0, 153}, - dictWord{133, 10, 332}, - dictWord{6, 10, 261}, - dictWord{ - 8, - 10, - 182, - }, - dictWord{139, 10, 943}, - dictWord{4, 11, 493}, - dictWord{144, 11, 55}, - dictWord{134, 10, 1721}, - dictWord{132, 0, 768}, - dictWord{4, 10, 933}, - dictWord{133, 10, 880}, - dictWord{7, 11, 555}, - dictWord{7, 11, 1316}, - dictWord{7, 11, 1412}, - dictWord{7, 11, 1839}, - dictWord{9, 11, 192}, - dictWord{ - 9, - 11, - 589, - }, - dictWord{11, 11, 241}, - dictWord{11, 11, 676}, - dictWord{11, 11, 811}, - dictWord{11, 11, 891}, - dictWord{12, 11, 140}, - dictWord{12, 11, 346}, - dictWord{ - 12, - 11, - 479, - }, - dictWord{13, 11, 30}, - dictWord{13, 11, 49}, - dictWord{13, 11, 381}, - dictWord{14, 11, 188}, - dictWord{15, 11, 150}, - dictWord{16, 11, 76}, - dictWord{18, 11, 30}, - dictWord{148, 11, 52}, - dictWord{4, 0, 518}, - dictWord{135, 0, 1136}, - dictWord{6, 11, 568}, - dictWord{7, 11, 112}, - dictWord{7, 11, 1804}, - dictWord{8, 11, 362}, - dictWord{8, 11, 410}, - dictWord{8, 11, 830}, - dictWord{9, 11, 514}, - dictWord{11, 11, 649}, - dictWord{142, 11, 157}, - dictWord{135, 11, 673}, - dictWord{8, 0, 689}, - dictWord{137, 0, 863}, - dictWord{4, 0, 18}, - dictWord{7, 0, 145}, - dictWord{7, 0, 444}, - dictWord{7, 0, 1278}, - dictWord{8, 0, 49}, - dictWord{8, 0, 400}, - dictWord{9, 0, 71}, - dictWord{9, 0, 250}, - dictWord{10, 0, 459}, - dictWord{12, 0, 160}, - dictWord{16, 0, 24}, - dictWord{132, 11, 625}, - dictWord{140, 0, 1020}, - dictWord{4, 0, 997}, - dictWord{6, 0, 1946}, - dictWord{6, 0, 1984}, - dictWord{134, 0, 1998}, - dictWord{6, 11, 16}, - dictWord{6, 11, 158}, - dictWord{7, 11, 43}, - dictWord{ - 7, - 11, - 129, - }, - dictWord{7, 11, 181}, - dictWord{8, 11, 276}, - dictWord{8, 11, 377}, - dictWord{10, 11, 523}, - dictWord{11, 11, 816}, - dictWord{12, 11, 455}, - dictWord{ - 13, - 11, - 303, - }, - dictWord{142, 11, 135}, - dictWord{133, 10, 812}, - dictWord{134, 0, 658}, - dictWord{4, 11, 1}, - dictWord{7, 11, 1143}, - dictWord{7, 11, 1463}, - dictWord{8, 11, 61}, - dictWord{9, 11, 207}, - dictWord{9, 11, 390}, - dictWord{9, 11, 467}, - dictWord{139, 11, 836}, - dictWord{150, 11, 26}, - dictWord{140, 0, 106}, - dictWord{6, 0, 1827}, - dictWord{10, 0, 931}, - dictWord{18, 0, 166}, - dictWord{20, 0, 114}, - dictWord{4, 10, 137}, - dictWord{7, 10, 1178}, - dictWord{7, 11, 1319}, - dictWord{135, 10, 1520}, - dictWord{133, 0, 1010}, - dictWord{4, 11, 723}, - dictWord{5, 11, 895}, - dictWord{7, 11, 1031}, - dictWord{8, 11, 199}, - dictWord{8, 11, 340}, - dictWord{9, 11, 153}, - dictWord{9, 11, 215}, - dictWord{10, 11, 21}, - dictWord{10, 11, 59}, - dictWord{10, 11, 80}, - dictWord{10, 11, 224}, - dictWord{11, 11, 229}, - dictWord{11, 11, 652}, - dictWord{12, 11, 192}, - dictWord{13, 11, 146}, - dictWord{142, 11, 91}, - dictWord{132, 11, 295}, - dictWord{6, 11, 619}, - dictWord{ - 7, - 11, - 898, - }, - dictWord{7, 11, 1092}, - dictWord{8, 11, 485}, - dictWord{18, 11, 28}, - dictWord{147, 11, 116}, - dictWord{137, 11, 51}, - dictWord{6, 10, 1661}, - dictWord{ - 7, - 10, - 1975, 
- }, - dictWord{7, 10, 2009}, - dictWord{135, 10, 2011}, - dictWord{5, 11, 309}, - dictWord{140, 11, 211}, - dictWord{5, 0, 87}, - dictWord{7, 0, 313}, - dictWord{ - 7, - 0, - 1103, - }, - dictWord{10, 0, 208}, - dictWord{10, 0, 582}, - dictWord{11, 0, 389}, - dictWord{11, 0, 813}, - dictWord{12, 0, 385}, - dictWord{13, 0, 286}, - dictWord{ - 14, - 0, - 124, - }, - dictWord{146, 0, 108}, - dictWord{5, 11, 125}, - dictWord{8, 11, 77}, - dictWord{138, 11, 15}, - dictWord{132, 0, 267}, - dictWord{133, 0, 703}, - dictWord{ - 137, - 11, - 155, - }, - dictWord{133, 11, 439}, - dictWord{11, 11, 164}, - dictWord{140, 11, 76}, - dictWord{9, 0, 496}, - dictWord{5, 10, 89}, - dictWord{7, 10, 1915}, - dictWord{ - 9, - 10, - 185, - }, - dictWord{9, 10, 235}, - dictWord{10, 10, 64}, - dictWord{10, 10, 270}, - dictWord{10, 10, 403}, - dictWord{10, 10, 469}, - dictWord{10, 10, 529}, - dictWord{10, 10, 590}, - dictWord{11, 10, 140}, - dictWord{11, 10, 860}, - dictWord{13, 10, 1}, - dictWord{13, 10, 422}, - dictWord{14, 10, 341}, - dictWord{14, 10, 364}, - dictWord{17, 10, 93}, - dictWord{18, 10, 113}, - dictWord{19, 10, 97}, - dictWord{147, 10, 113}, - dictWord{133, 10, 695}, - dictWord{135, 0, 1121}, - dictWord{ - 5, - 10, - 6, - }, - dictWord{6, 10, 183}, - dictWord{7, 10, 680}, - dictWord{7, 10, 978}, - dictWord{7, 10, 1013}, - dictWord{7, 10, 1055}, - dictWord{12, 10, 230}, - dictWord{ - 13, - 10, - 172, - }, - dictWord{146, 10, 29}, - dictWord{4, 11, 8}, - dictWord{7, 11, 1152}, - dictWord{7, 11, 1153}, - dictWord{7, 11, 1715}, - dictWord{9, 11, 374}, - dictWord{ - 10, - 11, - 478, - }, - dictWord{139, 11, 648}, - dictWord{135, 11, 1099}, - dictWord{6, 10, 29}, - dictWord{139, 10, 63}, - dictWord{4, 0, 561}, - dictWord{10, 0, 249}, - dictWord{ - 139, - 0, - 209, - }, - dictWord{132, 0, 760}, - dictWord{7, 11, 799}, - dictWord{138, 11, 511}, - dictWord{136, 11, 87}, - dictWord{9, 0, 154}, - dictWord{140, 0, 485}, - dictWord{136, 0, 255}, - dictWord{132, 0, 323}, - dictWord{140, 0, 419}, - dictWord{132, 10, 311}, - dictWord{134, 10, 1740}, - dictWord{4, 0, 368}, - dictWord{ - 135, - 0, - 641, - }, - dictWord{7, 10, 170}, - dictWord{8, 10, 90}, - dictWord{8, 10, 177}, - dictWord{8, 10, 415}, - dictWord{11, 10, 714}, - dictWord{142, 10, 281}, - dictWord{ - 4, - 11, - 69, - }, - dictWord{5, 11, 122}, - dictWord{9, 11, 656}, - dictWord{138, 11, 464}, - dictWord{5, 11, 849}, - dictWord{134, 11, 1633}, - dictWord{8, 0, 522}, - dictWord{ - 142, - 0, - 328, - }, - dictWord{11, 10, 91}, - dictWord{13, 10, 129}, - dictWord{15, 10, 101}, - dictWord{145, 10, 125}, - dictWord{7, 0, 562}, - dictWord{8, 0, 551}, - dictWord{ - 4, - 10, - 494, - }, - dictWord{6, 10, 74}, - dictWord{7, 10, 44}, - dictWord{11, 11, 499}, - dictWord{12, 10, 17}, - dictWord{15, 10, 5}, - dictWord{148, 10, 11}, - dictWord{4, 10, 276}, - dictWord{133, 10, 296}, - dictWord{9, 0, 92}, - dictWord{147, 0, 91}, - dictWord{4, 10, 7}, - dictWord{5, 10, 90}, - dictWord{5, 10, 158}, - dictWord{6, 10, 542}, - dictWord{ - 7, - 10, - 221, - }, - dictWord{7, 10, 1574}, - dictWord{9, 10, 490}, - dictWord{10, 10, 540}, - dictWord{11, 10, 443}, - dictWord{139, 10, 757}, - dictWord{6, 0, 525}, - dictWord{ - 6, - 0, - 1976, - }, - dictWord{8, 0, 806}, - dictWord{9, 0, 876}, - dictWord{140, 0, 284}, - dictWord{5, 11, 859}, - dictWord{7, 10, 588}, - dictWord{7, 11, 1160}, - dictWord{ - 8, - 11, - 107, - }, - dictWord{9, 10, 175}, - dictWord{9, 11, 291}, - dictWord{9, 11, 439}, - dictWord{10, 10, 530}, - dictWord{10, 11, 663}, - dictWord{11, 11, 609}, - dictWord{ 
- 140, - 11, - 197, - }, - dictWord{7, 11, 168}, - dictWord{13, 11, 196}, - dictWord{141, 11, 237}, - dictWord{139, 0, 958}, - dictWord{133, 0, 594}, - dictWord{135, 10, 580}, - dictWord{7, 10, 88}, - dictWord{136, 10, 627}, - dictWord{6, 0, 479}, - dictWord{6, 0, 562}, - dictWord{7, 0, 1060}, - dictWord{13, 0, 6}, - dictWord{5, 10, 872}, - dictWord{ - 6, - 10, - 57, - }, - dictWord{7, 10, 471}, - dictWord{9, 10, 447}, - dictWord{137, 10, 454}, - dictWord{136, 11, 413}, - dictWord{145, 11, 19}, - dictWord{4, 11, 117}, - dictWord{ - 6, - 11, - 372, - }, - dictWord{7, 11, 1905}, - dictWord{142, 11, 323}, - dictWord{4, 11, 722}, - dictWord{139, 11, 471}, - dictWord{17, 0, 61}, - dictWord{5, 10, 31}, - dictWord{134, 10, 614}, - dictWord{8, 10, 330}, - dictWord{140, 10, 477}, - dictWord{7, 10, 1200}, - dictWord{138, 10, 460}, - dictWord{6, 10, 424}, - dictWord{ - 135, - 10, - 1866, - }, - dictWord{6, 0, 1641}, - dictWord{136, 0, 820}, - dictWord{6, 0, 1556}, - dictWord{134, 0, 1618}, - dictWord{9, 11, 5}, - dictWord{12, 11, 216}, - dictWord{ - 12, - 11, - 294, - }, - dictWord{12, 11, 298}, - dictWord{12, 11, 400}, - dictWord{12, 11, 518}, - dictWord{13, 11, 229}, - dictWord{143, 11, 139}, - dictWord{15, 11, 155}, - dictWord{144, 11, 79}, - dictWord{4, 0, 302}, - dictWord{135, 0, 1766}, - dictWord{5, 10, 13}, - dictWord{134, 10, 142}, - dictWord{6, 0, 148}, - dictWord{7, 0, 1313}, - dictWord{ - 7, - 10, - 116, - }, - dictWord{8, 10, 322}, - dictWord{8, 10, 755}, - dictWord{9, 10, 548}, - dictWord{10, 10, 714}, - dictWord{11, 10, 884}, - dictWord{141, 10, 324}, - dictWord{137, 0, 676}, - dictWord{9, 11, 88}, - dictWord{139, 11, 270}, - dictWord{5, 11, 12}, - dictWord{7, 11, 375}, - dictWord{137, 11, 438}, - dictWord{134, 0, 1674}, - dictWord{7, 10, 1472}, - dictWord{135, 10, 1554}, - dictWord{11, 0, 178}, - dictWord{7, 10, 1071}, - dictWord{7, 10, 1541}, - dictWord{7, 10, 1767}, - dictWord{ - 7, - 10, - 1806, - }, - dictWord{11, 10, 162}, - dictWord{11, 10, 242}, - dictWord{12, 10, 605}, - dictWord{15, 10, 26}, - dictWord{144, 10, 44}, - dictWord{6, 0, 389}, - dictWord{ - 7, - 0, - 149, - }, - dictWord{9, 0, 142}, - dictWord{138, 0, 94}, - dictWord{140, 11, 71}, - dictWord{145, 10, 115}, - dictWord{6, 0, 8}, - dictWord{7, 0, 1881}, - dictWord{8, 0, 91}, - dictWord{11, 11, 966}, - dictWord{12, 11, 287}, - dictWord{13, 11, 342}, - dictWord{13, 11, 402}, - dictWord{15, 11, 110}, - dictWord{143, 11, 163}, - dictWord{ - 4, - 11, - 258, - }, - dictWord{136, 11, 639}, - dictWord{6, 11, 22}, - dictWord{7, 11, 903}, - dictWord{138, 11, 577}, - dictWord{133, 11, 681}, - dictWord{135, 10, 1111}, - dictWord{135, 11, 1286}, - dictWord{9, 0, 112}, - dictWord{8, 10, 1}, - dictWord{138, 10, 326}, - dictWord{5, 10, 488}, - dictWord{6, 10, 527}, - dictWord{7, 10, 489}, - dictWord{ - 7, - 10, - 1636, - }, - dictWord{8, 10, 121}, - dictWord{8, 10, 144}, - dictWord{8, 10, 359}, - dictWord{9, 10, 193}, - dictWord{9, 10, 241}, - dictWord{9, 10, 336}, - dictWord{ - 9, - 10, - 882, - }, - dictWord{11, 10, 266}, - dictWord{11, 10, 372}, - dictWord{11, 10, 944}, - dictWord{12, 10, 401}, - dictWord{140, 10, 641}, - dictWord{4, 11, 664}, - dictWord{133, 11, 804}, - dictWord{6, 0, 747}, - dictWord{134, 0, 1015}, - dictWord{135, 0, 1746}, - dictWord{9, 10, 31}, - dictWord{10, 10, 244}, - dictWord{ - 10, - 10, - 699, - }, - dictWord{12, 10, 149}, - dictWord{141, 10, 497}, - dictWord{133, 10, 377}, - dictWord{135, 0, 24}, - dictWord{6, 0, 1352}, - dictWord{5, 11, 32}, - dictWord{ - 145, - 10, - 101, - }, - dictWord{7, 0, 1530}, 
- dictWord{10, 0, 158}, - dictWord{13, 0, 13}, - dictWord{13, 0, 137}, - dictWord{13, 0, 258}, - dictWord{14, 0, 111}, - dictWord{ - 14, - 0, - 225, - }, - dictWord{14, 0, 253}, - dictWord{14, 0, 304}, - dictWord{14, 0, 339}, - dictWord{14, 0, 417}, - dictWord{146, 0, 33}, - dictWord{4, 0, 503}, - dictWord{ - 135, - 0, - 1661, - }, - dictWord{5, 0, 130}, - dictWord{6, 0, 845}, - dictWord{7, 0, 1314}, - dictWord{9, 0, 610}, - dictWord{10, 0, 718}, - dictWord{11, 0, 601}, - dictWord{11, 0, 819}, - dictWord{11, 0, 946}, - dictWord{140, 0, 536}, - dictWord{10, 0, 149}, - dictWord{11, 0, 280}, - dictWord{142, 0, 336}, - dictWord{134, 0, 1401}, - dictWord{ - 135, - 0, - 1946, - }, - dictWord{8, 0, 663}, - dictWord{144, 0, 8}, - dictWord{134, 0, 1607}, - dictWord{135, 10, 2023}, - dictWord{4, 11, 289}, - dictWord{7, 11, 629}, - dictWord{ - 7, - 11, - 1698, - }, - dictWord{7, 11, 1711}, - dictWord{140, 11, 215}, - dictWord{6, 11, 450}, - dictWord{136, 11, 109}, - dictWord{10, 0, 882}, - dictWord{10, 0, 883}, - dictWord{10, 0, 914}, - dictWord{138, 0, 928}, - dictWord{133, 10, 843}, - dictWord{136, 11, 705}, - dictWord{132, 10, 554}, - dictWord{133, 10, 536}, - dictWord{ - 5, - 0, - 417, - }, - dictWord{9, 10, 79}, - dictWord{11, 10, 625}, - dictWord{145, 10, 7}, - dictWord{7, 11, 1238}, - dictWord{142, 11, 37}, - dictWord{4, 0, 392}, - dictWord{ - 135, - 0, - 1597, - }, - dictWord{5, 0, 433}, - dictWord{9, 0, 633}, - dictWord{11, 0, 629}, - dictWord{132, 10, 424}, - dictWord{7, 10, 336}, - dictWord{136, 10, 785}, - dictWord{ - 134, - 11, - 355, - }, - dictWord{6, 0, 234}, - dictWord{7, 0, 769}, - dictWord{9, 0, 18}, - dictWord{138, 0, 358}, - dictWord{4, 10, 896}, - dictWord{134, 10, 1777}, - dictWord{ - 138, - 11, - 323, - }, - dictWord{7, 0, 140}, - dictWord{7, 0, 1950}, - dictWord{8, 0, 680}, - dictWord{11, 0, 817}, - dictWord{147, 0, 88}, - dictWord{7, 0, 1222}, - dictWord{ - 138, - 0, - 386, - }, - dictWord{139, 11, 908}, - dictWord{11, 0, 249}, - dictWord{12, 0, 313}, - dictWord{16, 0, 66}, - dictWord{145, 0, 26}, - dictWord{134, 0, 5}, - dictWord{7, 10, 750}, - dictWord{9, 10, 223}, - dictWord{11, 10, 27}, - dictWord{11, 10, 466}, - dictWord{12, 10, 624}, - dictWord{14, 10, 265}, - dictWord{146, 10, 61}, - dictWord{ - 134, - 11, - 26, - }, - dictWord{134, 0, 1216}, - dictWord{5, 0, 963}, - dictWord{134, 0, 1773}, - dictWord{4, 11, 414}, - dictWord{5, 11, 467}, - dictWord{9, 11, 654}, - dictWord{ - 10, - 11, - 451, - }, - dictWord{12, 11, 59}, - dictWord{141, 11, 375}, - dictWord{135, 11, 17}, - dictWord{4, 10, 603}, - dictWord{133, 10, 661}, - dictWord{4, 10, 11}, - dictWord{ - 6, - 10, - 128, - }, - dictWord{7, 10, 231}, - dictWord{7, 10, 1533}, - dictWord{138, 10, 725}, - dictWord{135, 11, 955}, - dictWord{7, 0, 180}, - dictWord{8, 0, 509}, - dictWord{ - 136, - 0, - 792, - }, - dictWord{132, 10, 476}, - dictWord{132, 0, 1002}, - dictWord{133, 11, 538}, - dictWord{135, 10, 1807}, - dictWord{132, 0, 931}, - dictWord{7, 0, 943}, - dictWord{11, 0, 614}, - dictWord{140, 0, 747}, - dictWord{135, 0, 1837}, - dictWord{9, 10, 20}, - dictWord{10, 10, 324}, - dictWord{10, 10, 807}, - dictWord{ - 139, - 10, - 488, - }, - dictWord{134, 0, 641}, - dictWord{6, 11, 280}, - dictWord{10, 11, 502}, - dictWord{11, 11, 344}, - dictWord{140, 11, 38}, - dictWord{5, 11, 45}, - dictWord{ - 7, - 11, - 1161, - }, - dictWord{11, 11, 448}, - dictWord{11, 11, 880}, - dictWord{13, 11, 139}, - dictWord{13, 11, 407}, - dictWord{15, 11, 16}, - dictWord{17, 11, 95}, - dictWord{ - 18, - 11, - 66, - }, - dictWord{18, 11, 
88}, - dictWord{18, 11, 123}, - dictWord{149, 11, 7}, - dictWord{9, 0, 280}, - dictWord{138, 0, 134}, - dictWord{22, 0, 22}, - dictWord{23, 0, 5}, - dictWord{151, 0, 29}, - dictWord{136, 11, 777}, - dictWord{4, 0, 90}, - dictWord{5, 0, 545}, - dictWord{7, 0, 754}, - dictWord{9, 0, 186}, - dictWord{10, 0, 72}, - dictWord{ - 10, - 0, - 782, - }, - dictWord{11, 0, 577}, - dictWord{11, 0, 610}, - dictWord{11, 0, 960}, - dictWord{12, 0, 354}, - dictWord{12, 0, 362}, - dictWord{12, 0, 595}, - dictWord{ - 4, - 11, - 410, - }, - dictWord{135, 11, 521}, - dictWord{135, 11, 1778}, - dictWord{5, 10, 112}, - dictWord{6, 10, 103}, - dictWord{134, 10, 150}, - dictWord{138, 10, 356}, - dictWord{132, 0, 742}, - dictWord{7, 0, 151}, - dictWord{9, 0, 329}, - dictWord{139, 0, 254}, - dictWord{8, 0, 853}, - dictWord{8, 0, 881}, - dictWord{8, 0, 911}, - dictWord{ - 8, - 0, - 912, - }, - dictWord{10, 0, 872}, - dictWord{12, 0, 741}, - dictWord{12, 0, 742}, - dictWord{152, 0, 18}, - dictWord{4, 11, 573}, - dictWord{136, 11, 655}, - dictWord{ - 6, - 0, - 921, - }, - dictWord{134, 0, 934}, - dictWord{9, 0, 187}, - dictWord{10, 0, 36}, - dictWord{11, 0, 1016}, - dictWord{17, 0, 44}, - dictWord{146, 0, 64}, - dictWord{7, 0, 833}, - dictWord{136, 0, 517}, - dictWord{4, 0, 506}, - dictWord{5, 0, 295}, - dictWord{135, 0, 1680}, - dictWord{4, 10, 708}, - dictWord{8, 10, 15}, - dictWord{9, 10, 50}, - dictWord{ - 9, - 10, - 386, - }, - dictWord{11, 10, 18}, - dictWord{11, 10, 529}, - dictWord{140, 10, 228}, - dictWord{7, 0, 251}, - dictWord{7, 0, 1701}, - dictWord{8, 0, 436}, - dictWord{ - 4, - 10, - 563, - }, - dictWord{7, 10, 592}, - dictWord{7, 10, 637}, - dictWord{7, 10, 770}, - dictWord{8, 10, 463}, - dictWord{9, 10, 60}, - dictWord{9, 10, 335}, - dictWord{9, 10, 904}, - dictWord{10, 10, 73}, - dictWord{11, 10, 434}, - dictWord{12, 10, 585}, - dictWord{13, 10, 331}, - dictWord{18, 10, 110}, - dictWord{148, 10, 60}, - dictWord{ - 132, - 10, - 502, - }, - dictWord{136, 0, 584}, - dictWord{6, 10, 347}, - dictWord{138, 10, 161}, - dictWord{7, 0, 987}, - dictWord{9, 0, 688}, - dictWord{10, 0, 522}, - dictWord{ - 11, - 0, - 788, - }, - dictWord{12, 0, 137}, - dictWord{12, 0, 566}, - dictWord{14, 0, 9}, - dictWord{14, 0, 24}, - dictWord{14, 0, 64}, - dictWord{7, 11, 899}, - dictWord{142, 11, 325}, - dictWord{4, 0, 214}, - dictWord{5, 0, 500}, - dictWord{5, 10, 102}, - dictWord{6, 10, 284}, - dictWord{7, 10, 1079}, - dictWord{7, 10, 1423}, - dictWord{7, 10, 1702}, - dictWord{ - 8, - 10, - 470, - }, - dictWord{9, 10, 554}, - dictWord{9, 10, 723}, - dictWord{139, 10, 333}, - dictWord{7, 10, 246}, - dictWord{135, 10, 840}, - dictWord{6, 10, 10}, - dictWord{ - 8, - 10, - 571, - }, - dictWord{9, 10, 739}, - dictWord{143, 10, 91}, - dictWord{133, 10, 626}, - dictWord{146, 0, 195}, - dictWord{134, 0, 1775}, - dictWord{7, 0, 389}, - dictWord{7, 0, 700}, - dictWord{7, 0, 940}, - dictWord{8, 0, 514}, - dictWord{9, 0, 116}, - dictWord{9, 0, 535}, - dictWord{10, 0, 118}, - dictWord{11, 0, 107}, - dictWord{ - 11, - 0, - 148, - }, - dictWord{11, 0, 922}, - dictWord{12, 0, 254}, - dictWord{12, 0, 421}, - dictWord{142, 0, 238}, - dictWord{5, 10, 18}, - dictWord{6, 10, 526}, - dictWord{13, 10, 24}, - dictWord{13, 10, 110}, - dictWord{19, 10, 5}, - dictWord{147, 10, 44}, - dictWord{132, 0, 743}, - dictWord{11, 0, 292}, - dictWord{4, 10, 309}, - dictWord{5, 10, 462}, - dictWord{7, 10, 970}, - dictWord{135, 10, 1097}, - dictWord{22, 10, 30}, - dictWord{150, 10, 33}, - dictWord{139, 11, 338}, - dictWord{135, 11, 1598}, - dictWord{ - 7, - 0, - 
1283, - }, - dictWord{9, 0, 227}, - dictWord{11, 0, 325}, - dictWord{11, 0, 408}, - dictWord{14, 0, 180}, - dictWord{146, 0, 47}, - dictWord{4, 0, 953}, - dictWord{6, 0, 1805}, - dictWord{6, 0, 1814}, - dictWord{6, 0, 1862}, - dictWord{140, 0, 774}, - dictWord{6, 11, 611}, - dictWord{135, 11, 1733}, - dictWord{135, 11, 1464}, - dictWord{ - 5, - 0, - 81, - }, - dictWord{7, 0, 146}, - dictWord{7, 0, 1342}, - dictWord{8, 0, 53}, - dictWord{8, 0, 561}, - dictWord{8, 0, 694}, - dictWord{8, 0, 754}, - dictWord{9, 0, 115}, - dictWord{ - 9, - 0, - 179, - }, - dictWord{9, 0, 894}, - dictWord{10, 0, 462}, - dictWord{10, 0, 813}, - dictWord{11, 0, 230}, - dictWord{11, 0, 657}, - dictWord{11, 0, 699}, - dictWord{11, 0, 748}, - dictWord{12, 0, 119}, - dictWord{12, 0, 200}, - dictWord{12, 0, 283}, - dictWord{142, 0, 273}, - dictWord{5, 0, 408}, - dictWord{6, 0, 789}, - dictWord{6, 0, 877}, - dictWord{ - 6, - 0, - 1253, - }, - dictWord{6, 0, 1413}, - dictWord{137, 0, 747}, - dictWord{134, 10, 1704}, - dictWord{135, 11, 663}, - dictWord{6, 0, 1910}, - dictWord{6, 0, 1915}, - dictWord{6, 0, 1923}, - dictWord{9, 0, 913}, - dictWord{9, 0, 928}, - dictWord{9, 0, 950}, - dictWord{9, 0, 954}, - dictWord{9, 0, 978}, - dictWord{9, 0, 993}, - dictWord{12, 0, 812}, - dictWord{12, 0, 819}, - dictWord{12, 0, 831}, - dictWord{12, 0, 833}, - dictWord{12, 0, 838}, - dictWord{12, 0, 909}, - dictWord{12, 0, 928}, - dictWord{12, 0, 931}, - dictWord{12, 0, 950}, - dictWord{15, 0, 186}, - dictWord{15, 0, 187}, - dictWord{15, 0, 195}, - dictWord{15, 0, 196}, - dictWord{15, 0, 209}, - dictWord{15, 0, 215}, - dictWord{ - 15, - 0, - 236, - }, - dictWord{15, 0, 241}, - dictWord{15, 0, 249}, - dictWord{15, 0, 253}, - dictWord{18, 0, 180}, - dictWord{18, 0, 221}, - dictWord{18, 0, 224}, - dictWord{ - 18, - 0, - 227, - }, - dictWord{18, 0, 229}, - dictWord{149, 0, 60}, - dictWord{7, 0, 1826}, - dictWord{135, 0, 1938}, - dictWord{11, 0, 490}, - dictWord{18, 0, 143}, - dictWord{ - 5, - 10, - 86, - }, - dictWord{7, 10, 743}, - dictWord{9, 10, 85}, - dictWord{10, 10, 281}, - dictWord{10, 10, 432}, - dictWord{12, 10, 251}, - dictWord{13, 10, 118}, - dictWord{ - 142, - 10, - 378, - }, - dictWord{5, 10, 524}, - dictWord{133, 10, 744}, - dictWord{141, 11, 442}, - dictWord{10, 10, 107}, - dictWord{140, 10, 436}, - dictWord{135, 11, 503}, - dictWord{134, 0, 1162}, - dictWord{132, 10, 927}, - dictWord{7, 0, 30}, - dictWord{8, 0, 86}, - dictWord{8, 0, 315}, - dictWord{8, 0, 700}, - dictWord{9, 0, 576}, - dictWord{ - 9, - 0, - 858, - }, - dictWord{10, 0, 414}, - dictWord{11, 0, 310}, - dictWord{11, 0, 888}, - dictWord{11, 0, 904}, - dictWord{12, 0, 361}, - dictWord{13, 0, 248}, - dictWord{13, 0, 371}, - dictWord{14, 0, 142}, - dictWord{12, 10, 670}, - dictWord{146, 10, 94}, - dictWord{134, 0, 721}, - dictWord{4, 11, 113}, - dictWord{5, 11, 163}, - dictWord{5, 11, 735}, - dictWord{7, 11, 1009}, - dictWord{7, 10, 1149}, - dictWord{9, 11, 9}, - dictWord{9, 10, 156}, - dictWord{9, 11, 771}, - dictWord{12, 11, 90}, - dictWord{13, 11, 138}, - dictWord{13, 11, 410}, - dictWord{143, 11, 128}, - dictWord{138, 0, 839}, - dictWord{133, 10, 778}, - dictWord{137, 0, 617}, - dictWord{133, 10, 502}, - dictWord{ - 8, - 10, - 196, - }, - dictWord{10, 10, 283}, - dictWord{139, 10, 406}, - dictWord{6, 0, 428}, - dictWord{7, 0, 524}, - dictWord{8, 0, 169}, - dictWord{8, 0, 234}, - dictWord{9, 0, 480}, - dictWord{138, 0, 646}, - dictWord{133, 10, 855}, - dictWord{134, 0, 1648}, - dictWord{7, 0, 1205}, - dictWord{138, 0, 637}, - dictWord{7, 0, 1596}, - dictWord{ - 4, 
- 11, - 935, - }, - dictWord{133, 11, 823}, - dictWord{5, 11, 269}, - dictWord{7, 11, 434}, - dictWord{7, 11, 891}, - dictWord{8, 11, 339}, - dictWord{9, 11, 702}, - dictWord{ - 11, - 11, - 594, - }, - dictWord{11, 11, 718}, - dictWord{145, 11, 100}, - dictWord{7, 11, 878}, - dictWord{9, 11, 485}, - dictWord{141, 11, 264}, - dictWord{4, 0, 266}, - dictWord{ - 8, - 0, - 4, - }, - dictWord{9, 0, 39}, - dictWord{10, 0, 166}, - dictWord{11, 0, 918}, - dictWord{12, 0, 635}, - dictWord{20, 0, 10}, - dictWord{22, 0, 27}, - dictWord{22, 0, 43}, - dictWord{ - 22, - 0, - 52, - }, - dictWord{134, 11, 1713}, - dictWord{7, 10, 1400}, - dictWord{9, 10, 446}, - dictWord{138, 10, 45}, - dictWord{135, 11, 900}, - dictWord{132, 0, 862}, - dictWord{134, 0, 1554}, - dictWord{135, 11, 1033}, - dictWord{19, 0, 16}, - dictWord{147, 11, 16}, - dictWord{135, 11, 1208}, - dictWord{7, 0, 157}, - dictWord{ - 136, - 0, - 279, - }, - dictWord{6, 0, 604}, - dictWord{136, 0, 391}, - dictWord{13, 10, 455}, - dictWord{15, 10, 99}, - dictWord{15, 10, 129}, - dictWord{144, 10, 68}, - dictWord{ - 135, - 10, - 172, - }, - dictWord{7, 0, 945}, - dictWord{11, 0, 713}, - dictWord{139, 0, 744}, - dictWord{4, 0, 973}, - dictWord{10, 0, 877}, - dictWord{10, 0, 937}, - dictWord{ - 10, - 0, - 938, - }, - dictWord{140, 0, 711}, - dictWord{139, 0, 1022}, - dictWord{132, 10, 568}, - dictWord{142, 11, 143}, - dictWord{4, 0, 567}, - dictWord{9, 0, 859}, - dictWord{ - 132, - 10, - 732, - }, - dictWord{7, 0, 1846}, - dictWord{136, 0, 628}, - dictWord{136, 10, 733}, - dictWord{133, 0, 762}, - dictWord{4, 10, 428}, - dictWord{135, 10, 1789}, - dictWord{10, 0, 784}, - dictWord{13, 0, 191}, - dictWord{7, 10, 2015}, - dictWord{140, 10, 665}, - dictWord{133, 0, 298}, - dictWord{7, 0, 633}, - dictWord{7, 0, 905}, - dictWord{7, 0, 909}, - dictWord{7, 0, 1538}, - dictWord{9, 0, 767}, - dictWord{140, 0, 636}, - dictWord{138, 10, 806}, - dictWord{132, 0, 795}, - dictWord{139, 0, 301}, - dictWord{135, 0, 1970}, - dictWord{5, 11, 625}, - dictWord{135, 11, 1617}, - dictWord{135, 11, 275}, - dictWord{7, 11, 37}, - dictWord{8, 11, 425}, - dictWord{ - 8, - 11, - 693, - }, - dictWord{9, 11, 720}, - dictWord{10, 11, 380}, - dictWord{10, 11, 638}, - dictWord{11, 11, 273}, - dictWord{11, 11, 307}, - dictWord{11, 11, 473}, - dictWord{ - 12, - 11, - 61, - }, - dictWord{143, 11, 43}, - dictWord{135, 11, 198}, - dictWord{134, 0, 1236}, - dictWord{7, 0, 369}, - dictWord{12, 0, 644}, - dictWord{12, 0, 645}, - dictWord{144, 0, 90}, - dictWord{19, 0, 15}, - dictWord{149, 0, 27}, - dictWord{6, 0, 71}, - dictWord{7, 0, 845}, - dictWord{8, 0, 160}, - dictWord{9, 0, 318}, - dictWord{6, 10, 1623}, - dictWord{134, 10, 1681}, - dictWord{134, 0, 1447}, - dictWord{134, 0, 1255}, - dictWord{138, 0, 735}, - dictWord{8, 0, 76}, - dictWord{132, 11, 168}, - dictWord{ - 6, - 10, - 1748, - }, - dictWord{8, 10, 715}, - dictWord{9, 10, 802}, - dictWord{10, 10, 46}, - dictWord{10, 10, 819}, - dictWord{13, 10, 308}, - dictWord{14, 10, 351}, - dictWord{14, 10, 363}, - dictWord{146, 10, 67}, - dictWord{135, 11, 91}, - dictWord{6, 0, 474}, - dictWord{4, 10, 63}, - dictWord{133, 10, 347}, - dictWord{133, 10, 749}, - dictWord{138, 0, 841}, - dictWord{133, 10, 366}, - dictWord{6, 0, 836}, - dictWord{132, 11, 225}, - dictWord{135, 0, 1622}, - dictWord{135, 10, 89}, - dictWord{ - 140, - 0, - 735, - }, - dictWord{134, 0, 1601}, - dictWord{138, 11, 145}, - dictWord{6, 0, 1390}, - dictWord{137, 0, 804}, - dictWord{142, 0, 394}, - dictWord{6, 11, 15}, - dictWord{ - 7, - 11, - 70, - }, - dictWord{10, 
[… several thousand deleted lines of auto-generated `dictWord{…}` entries elided — this portion of the diff only removes static-dictionary lookup data, apparently from a vendored compression library; no hand-written code changes appear in this span …]
dictWord{13, 11, 293}, - dictWord{142, 11, 56}, - dictWord{133, 11, 617}, - dictWord{139, 11, 50}, - dictWord{ - 135, - 10, - 22, - }, - dictWord{145, 0, 64}, - dictWord{5, 10, 639}, - dictWord{7, 10, 1249}, - dictWord{139, 10, 896}, - dictWord{138, 0, 998}, - dictWord{135, 11, 2042}, - dictWord{ - 4, - 11, - 546, - }, - dictWord{142, 11, 233}, - dictWord{6, 0, 1043}, - dictWord{134, 0, 1574}, - dictWord{134, 0, 1496}, - dictWord{4, 10, 102}, - dictWord{7, 10, 815}, - dictWord{7, 10, 1699}, - dictWord{139, 10, 964}, - dictWord{12, 0, 781}, - dictWord{142, 0, 461}, - dictWord{4, 11, 313}, - dictWord{133, 11, 577}, - dictWord{ - 6, - 0, - 639, - }, - dictWord{6, 0, 1114}, - dictWord{137, 0, 817}, - dictWord{8, 11, 184}, - dictWord{141, 11, 433}, - dictWord{7, 0, 1814}, - dictWord{135, 11, 935}, - dictWord{ - 10, - 0, - 997, - }, - dictWord{140, 0, 958}, - dictWord{4, 0, 812}, - dictWord{137, 11, 625}, - dictWord{132, 10, 899}, - dictWord{136, 10, 795}, - dictWord{5, 11, 886}, - dictWord{6, 11, 46}, - dictWord{6, 11, 1790}, - dictWord{7, 11, 14}, - dictWord{7, 11, 732}, - dictWord{7, 11, 1654}, - dictWord{8, 11, 95}, - dictWord{8, 11, 327}, - dictWord{ - 8, - 11, - 616, - }, - dictWord{10, 11, 598}, - dictWord{10, 11, 769}, - dictWord{11, 11, 134}, - dictWord{11, 11, 747}, - dictWord{12, 11, 378}, - dictWord{142, 11, 97}, - dictWord{136, 0, 139}, - dictWord{6, 10, 52}, - dictWord{9, 10, 104}, - dictWord{9, 10, 559}, - dictWord{12, 10, 308}, - dictWord{147, 10, 87}, - dictWord{133, 11, 1021}, - dictWord{132, 10, 604}, - dictWord{132, 10, 301}, - dictWord{136, 10, 779}, - dictWord{7, 0, 643}, - dictWord{136, 0, 236}, - dictWord{132, 11, 153}, - dictWord{ - 134, - 0, - 1172, - }, - dictWord{147, 10, 32}, - dictWord{133, 11, 798}, - dictWord{6, 0, 1338}, - dictWord{132, 11, 587}, - dictWord{6, 11, 598}, - dictWord{7, 11, 42}, - dictWord{ - 8, - 11, - 695, - }, - dictWord{10, 11, 212}, - dictWord{11, 11, 158}, - dictWord{14, 11, 196}, - dictWord{145, 11, 85}, - dictWord{135, 10, 508}, - dictWord{5, 11, 957}, - dictWord{5, 11, 1008}, - dictWord{135, 11, 249}, - dictWord{4, 11, 129}, - dictWord{135, 11, 465}, - dictWord{5, 0, 54}, - dictWord{7, 11, 470}, - dictWord{7, 11, 1057}, - dictWord{7, 11, 1201}, - dictWord{9, 11, 755}, - dictWord{11, 11, 906}, - dictWord{140, 11, 527}, - dictWord{7, 11, 908}, - dictWord{146, 11, 7}, - dictWord{ - 5, - 11, - 148, - }, - dictWord{136, 11, 450}, - dictWord{144, 11, 1}, - dictWord{4, 0, 256}, - dictWord{135, 0, 1488}, - dictWord{9, 0, 351}, - dictWord{6, 10, 310}, - dictWord{ - 7, - 10, - 1849, - }, - dictWord{8, 10, 72}, - dictWord{8, 10, 272}, - dictWord{8, 10, 431}, - dictWord{9, 10, 12}, - dictWord{10, 10, 563}, - dictWord{10, 10, 630}, - dictWord{ - 10, - 10, - 796, - }, - dictWord{10, 10, 810}, - dictWord{11, 10, 367}, - dictWord{11, 10, 599}, - dictWord{11, 10, 686}, - dictWord{140, 10, 672}, - dictWord{6, 0, 1885}, - dictWord{ - 6, - 0, - 1898, - }, - dictWord{6, 0, 1899}, - dictWord{140, 0, 955}, - dictWord{4, 0, 714}, - dictWord{133, 0, 469}, - dictWord{6, 0, 1270}, - dictWord{134, 0, 1456}, - dictWord{132, 0, 744}, - dictWord{6, 0, 313}, - dictWord{7, 10, 537}, - dictWord{8, 10, 64}, - dictWord{9, 10, 127}, - dictWord{10, 10, 496}, - dictWord{12, 10, 510}, - dictWord{141, 10, 384}, - dictWord{4, 11, 217}, - dictWord{4, 10, 244}, - dictWord{5, 11, 710}, - dictWord{7, 10, 233}, - dictWord{7, 11, 1926}, - dictWord{9, 11, 428}, - dictWord{9, 11, 708}, - dictWord{10, 11, 254}, - dictWord{10, 11, 296}, - dictWord{10, 11, 720}, - dictWord{11, 11, 109}, - 
dictWord{11, 11, 255}, - dictWord{12, 11, 165}, - dictWord{12, 11, 315}, - dictWord{13, 11, 107}, - dictWord{13, 11, 203}, - dictWord{14, 11, 54}, - dictWord{14, 11, 99}, - dictWord{14, 11, 114}, - dictWord{ - 14, - 11, - 388, - }, - dictWord{16, 11, 85}, - dictWord{17, 11, 9}, - dictWord{17, 11, 33}, - dictWord{20, 11, 25}, - dictWord{20, 11, 28}, - dictWord{20, 11, 29}, - dictWord{21, 11, 9}, - dictWord{21, 11, 10}, - dictWord{21, 11, 34}, - dictWord{150, 11, 17}, - dictWord{138, 0, 402}, - dictWord{7, 0, 969}, - dictWord{146, 0, 55}, - dictWord{8, 0, 50}, - dictWord{ - 137, - 0, - 624, - }, - dictWord{134, 0, 1355}, - dictWord{132, 0, 572}, - dictWord{134, 10, 1650}, - dictWord{10, 10, 702}, - dictWord{139, 10, 245}, - dictWord{ - 10, - 0, - 847, - }, - dictWord{142, 0, 445}, - dictWord{6, 0, 43}, - dictWord{7, 0, 38}, - dictWord{8, 0, 248}, - dictWord{138, 0, 513}, - dictWord{133, 0, 369}, - dictWord{137, 10, 338}, - dictWord{133, 0, 766}, - dictWord{133, 0, 363}, - dictWord{133, 10, 896}, - dictWord{8, 11, 392}, - dictWord{11, 11, 54}, - dictWord{13, 11, 173}, - dictWord{ - 13, - 11, - 294, - }, - dictWord{148, 11, 7}, - dictWord{134, 0, 678}, - dictWord{7, 11, 1230}, - dictWord{136, 11, 531}, - dictWord{6, 0, 258}, - dictWord{140, 0, 409}, - dictWord{ - 5, - 0, - 249, - }, - dictWord{148, 0, 82}, - dictWord{7, 10, 1117}, - dictWord{136, 10, 539}, - dictWord{5, 0, 393}, - dictWord{6, 0, 378}, - dictWord{7, 0, 1981}, - dictWord{9, 0, 32}, - dictWord{9, 0, 591}, - dictWord{10, 0, 685}, - dictWord{10, 0, 741}, - dictWord{142, 0, 382}, - dictWord{133, 0, 788}, - dictWord{134, 0, 1281}, - dictWord{ - 134, - 0, - 1295, - }, - dictWord{7, 0, 1968}, - dictWord{141, 0, 509}, - dictWord{4, 0, 61}, - dictWord{5, 0, 58}, - dictWord{5, 0, 171}, - dictWord{5, 0, 683}, - dictWord{6, 0, 291}, - dictWord{ - 6, - 0, - 566, - }, - dictWord{7, 0, 1650}, - dictWord{11, 0, 523}, - dictWord{12, 0, 273}, - dictWord{12, 0, 303}, - dictWord{15, 0, 39}, - dictWord{143, 0, 111}, - dictWord{ - 6, - 0, - 706, - }, - dictWord{134, 0, 1283}, - dictWord{134, 0, 589}, - dictWord{135, 11, 1433}, - dictWord{133, 11, 435}, - dictWord{7, 0, 1059}, - dictWord{13, 0, 54}, - dictWord{ - 5, - 10, - 4, - }, - dictWord{5, 10, 810}, - dictWord{6, 10, 13}, - dictWord{6, 10, 538}, - dictWord{6, 10, 1690}, - dictWord{6, 10, 1726}, - dictWord{7, 10, 1819}, - dictWord{ - 8, - 10, - 148, - }, - dictWord{8, 10, 696}, - dictWord{8, 10, 791}, - dictWord{12, 10, 125}, - dictWord{143, 10, 9}, - dictWord{135, 10, 1268}, - dictWord{5, 11, 85}, - dictWord{ - 6, - 11, - 419, - }, - dictWord{7, 11, 134}, - dictWord{7, 11, 305}, - dictWord{7, 11, 361}, - dictWord{7, 11, 1337}, - dictWord{8, 11, 71}, - dictWord{140, 11, 519}, - dictWord{ - 137, - 0, - 824, - }, - dictWord{140, 11, 688}, - dictWord{5, 11, 691}, - dictWord{7, 11, 345}, - dictWord{7, 10, 1385}, - dictWord{9, 11, 94}, - dictWord{11, 10, 582}, - dictWord{ - 11, - 10, - 650, - }, - dictWord{11, 10, 901}, - dictWord{11, 10, 949}, - dictWord{12, 11, 169}, - dictWord{12, 10, 232}, - dictWord{12, 10, 236}, - dictWord{13, 10, 413}, - dictWord{13, 10, 501}, - dictWord{146, 10, 116}, - dictWord{4, 0, 917}, - dictWord{133, 0, 1005}, - dictWord{7, 0, 1598}, - dictWord{5, 11, 183}, - dictWord{6, 11, 582}, - dictWord{9, 11, 344}, - dictWord{10, 11, 679}, - dictWord{140, 11, 435}, - dictWord{4, 10, 925}, - dictWord{5, 10, 803}, - dictWord{8, 10, 698}, - dictWord{ - 138, - 10, - 828, - }, - dictWord{132, 0, 919}, - dictWord{135, 11, 511}, - dictWord{139, 10, 992}, - dictWord{4, 0, 255}, - 
dictWord{5, 0, 302}, - dictWord{6, 0, 132}, - dictWord{ - 7, - 0, - 128, - }, - dictWord{7, 0, 283}, - dictWord{7, 0, 1299}, - dictWord{10, 0, 52}, - dictWord{10, 0, 514}, - dictWord{11, 0, 925}, - dictWord{13, 0, 92}, - dictWord{142, 0, 309}, - dictWord{134, 0, 1369}, - dictWord{135, 10, 1847}, - dictWord{134, 0, 328}, - dictWord{7, 11, 1993}, - dictWord{136, 11, 684}, - dictWord{133, 10, 383}, - dictWord{137, 0, 173}, - dictWord{134, 11, 583}, - dictWord{134, 0, 1411}, - dictWord{19, 0, 65}, - dictWord{5, 11, 704}, - dictWord{8, 11, 357}, - dictWord{10, 11, 745}, - dictWord{14, 11, 426}, - dictWord{17, 11, 94}, - dictWord{147, 11, 57}, - dictWord{9, 10, 660}, - dictWord{138, 10, 347}, - dictWord{4, 11, 179}, - dictWord{5, 11, 198}, - dictWord{133, 11, 697}, - dictWord{7, 11, 347}, - dictWord{7, 11, 971}, - dictWord{8, 11, 181}, - dictWord{138, 11, 711}, - dictWord{141, 0, 442}, - dictWord{ - 11, - 0, - 842, - }, - dictWord{11, 0, 924}, - dictWord{13, 0, 317}, - dictWord{13, 0, 370}, - dictWord{13, 0, 469}, - dictWord{13, 0, 471}, - dictWord{14, 0, 397}, - dictWord{18, 0, 69}, - dictWord{18, 0, 145}, - dictWord{7, 10, 572}, - dictWord{9, 10, 592}, - dictWord{11, 10, 680}, - dictWord{12, 10, 356}, - dictWord{140, 10, 550}, - dictWord{14, 11, 19}, - dictWord{14, 11, 28}, - dictWord{144, 11, 29}, - dictWord{136, 0, 534}, - dictWord{4, 11, 243}, - dictWord{5, 11, 203}, - dictWord{7, 11, 19}, - dictWord{7, 11, 71}, - dictWord{7, 11, 113}, - dictWord{10, 11, 405}, - dictWord{11, 11, 357}, - dictWord{142, 11, 240}, - dictWord{6, 0, 210}, - dictWord{10, 0, 845}, - dictWord{138, 0, 862}, - dictWord{7, 11, 1351}, - dictWord{9, 11, 581}, - dictWord{10, 11, 639}, - dictWord{11, 11, 453}, - dictWord{140, 11, 584}, - dictWord{7, 11, 1450}, - dictWord{ - 139, - 11, - 99, - }, - dictWord{10, 0, 892}, - dictWord{12, 0, 719}, - dictWord{144, 0, 105}, - dictWord{4, 0, 284}, - dictWord{6, 0, 223}, - dictWord{134, 11, 492}, - dictWord{5, 11, 134}, - dictWord{6, 11, 408}, - dictWord{6, 11, 495}, - dictWord{135, 11, 1593}, - dictWord{136, 0, 529}, - dictWord{137, 0, 807}, - dictWord{4, 0, 218}, - dictWord{7, 0, 526}, - dictWord{143, 0, 137}, - dictWord{6, 0, 1444}, - dictWord{142, 11, 4}, - dictWord{132, 11, 665}, - dictWord{4, 0, 270}, - dictWord{5, 0, 192}, - dictWord{6, 0, 332}, - dictWord{7, 0, 1322}, - dictWord{4, 11, 248}, - dictWord{7, 11, 137}, - dictWord{137, 11, 349}, - dictWord{140, 0, 661}, - dictWord{7, 0, 1517}, - dictWord{11, 0, 597}, - dictWord{14, 0, 76}, - dictWord{14, 0, 335}, - dictWord{20, 0, 33}, - dictWord{7, 10, 748}, - dictWord{139, 10, 700}, - dictWord{5, 11, 371}, - dictWord{135, 11, 563}, - dictWord{146, 11, 57}, - dictWord{133, 10, 127}, - dictWord{133, 0, 418}, - dictWord{4, 11, 374}, - dictWord{7, 11, 547}, - dictWord{7, 11, 1700}, - dictWord{7, 11, 1833}, - dictWord{139, 11, 858}, - dictWord{6, 10, 198}, - dictWord{140, 10, 83}, - dictWord{7, 11, 1812}, - dictWord{13, 11, 259}, - dictWord{13, 11, 356}, - dictWord{ - 14, - 11, - 242, - }, - dictWord{147, 11, 114}, - dictWord{7, 0, 379}, - dictWord{8, 0, 481}, - dictWord{9, 0, 377}, - dictWord{5, 10, 276}, - dictWord{6, 10, 55}, - dictWord{ - 135, - 10, - 1369, - }, - dictWord{138, 11, 286}, - dictWord{5, 0, 1003}, - dictWord{6, 0, 149}, - dictWord{6, 10, 1752}, - dictWord{136, 10, 726}, - dictWord{8, 0, 262}, - dictWord{ - 9, - 0, - 627, - }, - dictWord{10, 0, 18}, - dictWord{11, 0, 214}, - dictWord{11, 0, 404}, - dictWord{11, 0, 457}, - dictWord{11, 0, 780}, - dictWord{11, 0, 913}, - dictWord{13, 0, 401}, - dictWord{14, 0, 200}, 
- dictWord{6, 11, 1647}, - dictWord{7, 11, 1552}, - dictWord{7, 11, 2010}, - dictWord{9, 11, 494}, - dictWord{137, 11, 509}, - dictWord{ - 135, - 0, - 742, - }, - dictWord{136, 0, 304}, - dictWord{132, 0, 142}, - dictWord{133, 10, 764}, - dictWord{6, 10, 309}, - dictWord{7, 10, 331}, - dictWord{138, 10, 550}, - dictWord{135, 10, 1062}, - dictWord{6, 11, 123}, - dictWord{7, 11, 214}, - dictWord{7, 10, 986}, - dictWord{9, 11, 728}, - dictWord{10, 11, 157}, - dictWord{11, 11, 346}, - dictWord{11, 11, 662}, - dictWord{143, 11, 106}, - dictWord{135, 10, 1573}, - dictWord{7, 0, 925}, - dictWord{137, 0, 799}, - dictWord{4, 0, 471}, - dictWord{5, 0, 51}, - dictWord{6, 0, 602}, - dictWord{8, 0, 484}, - dictWord{138, 0, 195}, - dictWord{136, 0, 688}, - dictWord{132, 0, 697}, - dictWord{6, 0, 1169}, - dictWord{6, 0, 1241}, - dictWord{6, 10, 194}, - dictWord{7, 10, 133}, - dictWord{10, 10, 493}, - dictWord{10, 10, 570}, - dictWord{139, 10, 664}, - dictWord{140, 0, 751}, - dictWord{7, 0, 929}, - dictWord{10, 0, 452}, - dictWord{11, 0, 878}, - dictWord{16, 0, 33}, - dictWord{5, 10, 24}, - dictWord{5, 10, 569}, - dictWord{6, 10, 3}, - dictWord{6, 10, 119}, - dictWord{ - 6, - 10, - 143, - }, - dictWord{6, 10, 440}, - dictWord{7, 10, 599}, - dictWord{7, 10, 1686}, - dictWord{7, 10, 1854}, - dictWord{8, 10, 424}, - dictWord{9, 10, 43}, - dictWord{ - 9, - 10, - 584, - }, - dictWord{9, 10, 760}, - dictWord{10, 10, 328}, - dictWord{11, 10, 159}, - dictWord{11, 10, 253}, - dictWord{12, 10, 487}, - dictWord{140, 10, 531}, - dictWord{ - 4, - 11, - 707, - }, - dictWord{13, 11, 106}, - dictWord{18, 11, 49}, - dictWord{147, 11, 41}, - dictWord{5, 0, 221}, - dictWord{5, 11, 588}, - dictWord{134, 11, 393}, - dictWord{134, 0, 1437}, - dictWord{6, 11, 211}, - dictWord{7, 11, 1690}, - dictWord{11, 11, 486}, - dictWord{140, 11, 369}, - dictWord{5, 10, 14}, - dictWord{5, 10, 892}, - dictWord{6, 10, 283}, - dictWord{7, 10, 234}, - dictWord{136, 10, 537}, - dictWord{4, 0, 988}, - dictWord{136, 0, 955}, - dictWord{135, 0, 1251}, - dictWord{4, 10, 126}, - dictWord{8, 10, 635}, - dictWord{147, 10, 34}, - dictWord{4, 10, 316}, - dictWord{135, 10, 1561}, - dictWord{137, 10, 861}, - dictWord{4, 10, 64}, - dictWord{ - 5, - 10, - 352, - }, - dictWord{5, 10, 720}, - dictWord{6, 10, 368}, - dictWord{139, 10, 359}, - dictWord{134, 0, 192}, - dictWord{4, 0, 132}, - dictWord{5, 0, 69}, - dictWord{ - 135, - 0, - 1242, - }, - dictWord{7, 10, 1577}, - dictWord{10, 10, 304}, - dictWord{10, 10, 549}, - dictWord{12, 10, 365}, - dictWord{13, 10, 220}, - dictWord{13, 10, 240}, - dictWord{142, 10, 33}, - dictWord{4, 0, 111}, - dictWord{7, 0, 865}, - dictWord{134, 11, 219}, - dictWord{5, 11, 582}, - dictWord{6, 11, 1646}, - dictWord{7, 11, 99}, - dictWord{ - 7, - 11, - 1962, - }, - dictWord{7, 11, 1986}, - dictWord{8, 11, 515}, - dictWord{8, 11, 773}, - dictWord{9, 11, 23}, - dictWord{9, 11, 491}, - dictWord{12, 11, 620}, - dictWord{ - 14, - 11, - 52, - }, - dictWord{145, 11, 50}, - dictWord{132, 0, 767}, - dictWord{7, 11, 568}, - dictWord{148, 11, 21}, - dictWord{6, 0, 42}, - dictWord{7, 0, 1416}, - dictWord{ - 7, - 0, - 2005, - }, - dictWord{8, 0, 131}, - dictWord{8, 0, 466}, - dictWord{9, 0, 672}, - dictWord{13, 0, 252}, - dictWord{20, 0, 103}, - dictWord{133, 11, 851}, - dictWord{ - 135, - 0, - 1050, - }, - dictWord{6, 10, 175}, - dictWord{137, 10, 289}, - dictWord{5, 10, 432}, - dictWord{133, 10, 913}, - dictWord{6, 0, 44}, - dictWord{136, 0, 368}, - dictWord{ - 135, - 11, - 784, - }, - dictWord{132, 0, 570}, - dictWord{133, 0, 120}, - 
dictWord{139, 10, 595}, - dictWord{140, 0, 29}, - dictWord{6, 0, 227}, - dictWord{135, 0, 1589}, - dictWord{4, 11, 98}, - dictWord{7, 11, 1365}, - dictWord{9, 11, 422}, - dictWord{9, 11, 670}, - dictWord{10, 11, 775}, - dictWord{11, 11, 210}, - dictWord{13, 11, 26}, - dictWord{13, 11, 457}, - dictWord{141, 11, 476}, - dictWord{140, 10, 80}, - dictWord{5, 10, 931}, - dictWord{134, 10, 1698}, - dictWord{133, 0, 522}, - dictWord{ - 134, - 0, - 1120, - }, - dictWord{135, 0, 1529}, - dictWord{12, 0, 739}, - dictWord{14, 0, 448}, - dictWord{142, 0, 467}, - dictWord{11, 10, 526}, - dictWord{11, 10, 939}, - dictWord{141, 10, 290}, - dictWord{5, 10, 774}, - dictWord{6, 10, 1637}, - dictWord{6, 10, 1686}, - dictWord{134, 10, 1751}, - dictWord{6, 0, 1667}, - dictWord{ - 135, - 0, - 2036, - }, - dictWord{7, 10, 1167}, - dictWord{11, 10, 934}, - dictWord{13, 10, 391}, - dictWord{145, 10, 76}, - dictWord{137, 11, 147}, - dictWord{6, 10, 260}, - dictWord{ - 7, - 10, - 1484, - }, - dictWord{11, 11, 821}, - dictWord{12, 11, 110}, - dictWord{12, 11, 153}, - dictWord{18, 11, 41}, - dictWord{150, 11, 19}, - dictWord{6, 0, 511}, - dictWord{12, 0, 132}, - dictWord{134, 10, 573}, - dictWord{5, 0, 568}, - dictWord{6, 0, 138}, - dictWord{135, 0, 1293}, - dictWord{132, 0, 1020}, - dictWord{8, 0, 258}, - dictWord{9, 0, 208}, - dictWord{137, 0, 359}, - dictWord{4, 0, 565}, - dictWord{8, 0, 23}, - dictWord{136, 0, 827}, - dictWord{134, 0, 344}, - dictWord{4, 0, 922}, - dictWord{ - 5, - 0, - 1023, - }, - dictWord{13, 11, 477}, - dictWord{14, 11, 120}, - dictWord{148, 11, 61}, - dictWord{134, 0, 240}, - dictWord{5, 11, 209}, - dictWord{6, 11, 30}, - dictWord{ - 11, - 11, - 56, - }, - dictWord{139, 11, 305}, - dictWord{6, 0, 171}, - dictWord{7, 0, 1002}, - dictWord{7, 0, 1324}, - dictWord{9, 0, 415}, - dictWord{14, 0, 230}, - dictWord{ - 18, - 0, - 68, - }, - dictWord{4, 10, 292}, - dictWord{4, 10, 736}, - dictWord{5, 10, 871}, - dictWord{6, 10, 1689}, - dictWord{7, 10, 1944}, - dictWord{137, 10, 580}, - dictWord{ - 9, - 11, - 635, - }, - dictWord{139, 11, 559}, - dictWord{4, 11, 150}, - dictWord{5, 11, 303}, - dictWord{134, 11, 327}, - dictWord{6, 10, 63}, - dictWord{135, 10, 920}, - dictWord{ - 133, - 10, - 793, - }, - dictWord{8, 11, 192}, - dictWord{10, 11, 78}, - dictWord{10, 11, 555}, - dictWord{11, 11, 308}, - dictWord{13, 11, 359}, - dictWord{147, 11, 95}, - dictWord{135, 11, 786}, - dictWord{135, 11, 1712}, - dictWord{136, 0, 402}, - dictWord{6, 0, 754}, - dictWord{6, 11, 1638}, - dictWord{7, 11, 79}, - dictWord{7, 11, 496}, - dictWord{9, 11, 138}, - dictWord{10, 11, 336}, - dictWord{11, 11, 12}, - dictWord{12, 11, 412}, - dictWord{12, 11, 440}, - dictWord{142, 11, 305}, - dictWord{4, 0, 716}, - dictWord{141, 0, 31}, - dictWord{133, 0, 982}, - dictWord{8, 0, 691}, - dictWord{8, 0, 731}, - dictWord{5, 10, 67}, - dictWord{6, 10, 62}, - dictWord{6, 10, 374}, - dictWord{ - 135, - 10, - 1391, - }, - dictWord{9, 10, 790}, - dictWord{140, 10, 47}, - dictWord{139, 11, 556}, - dictWord{151, 11, 1}, - dictWord{7, 11, 204}, - dictWord{7, 11, 415}, - dictWord{8, 11, 42}, - dictWord{10, 11, 85}, - dictWord{11, 11, 33}, - dictWord{11, 11, 564}, - dictWord{12, 11, 571}, - dictWord{149, 11, 1}, - dictWord{8, 0, 888}, - dictWord{ - 7, - 11, - 610, - }, - dictWord{135, 11, 1501}, - dictWord{4, 10, 391}, - dictWord{135, 10, 1169}, - dictWord{5, 0, 847}, - dictWord{9, 0, 840}, - dictWord{138, 0, 803}, - dictWord{137, 0, 823}, - dictWord{134, 0, 785}, - dictWord{8, 0, 152}, - dictWord{9, 0, 53}, - dictWord{9, 0, 268}, - dictWord{9, 
0, 901}, - dictWord{10, 0, 518}, - dictWord{ - 10, - 0, - 829, - }, - dictWord{11, 0, 188}, - dictWord{13, 0, 74}, - dictWord{14, 0, 46}, - dictWord{15, 0, 17}, - dictWord{15, 0, 33}, - dictWord{17, 0, 40}, - dictWord{18, 0, 36}, - dictWord{ - 19, - 0, - 20, - }, - dictWord{22, 0, 1}, - dictWord{152, 0, 2}, - dictWord{4, 11, 3}, - dictWord{5, 11, 247}, - dictWord{5, 11, 644}, - dictWord{7, 11, 744}, - dictWord{7, 11, 1207}, - dictWord{7, 11, 1225}, - dictWord{7, 11, 1909}, - dictWord{146, 11, 147}, - dictWord{136, 0, 532}, - dictWord{135, 0, 681}, - dictWord{132, 10, 271}, - dictWord{ - 140, - 0, - 314, - }, - dictWord{140, 0, 677}, - dictWord{4, 0, 684}, - dictWord{136, 0, 384}, - dictWord{5, 11, 285}, - dictWord{9, 11, 67}, - dictWord{13, 11, 473}, - dictWord{ - 143, - 11, - 82, - }, - dictWord{4, 10, 253}, - dictWord{5, 10, 544}, - dictWord{7, 10, 300}, - dictWord{137, 10, 340}, - dictWord{7, 0, 110}, - dictWord{7, 0, 447}, - dictWord{8, 0, 290}, - dictWord{8, 0, 591}, - dictWord{9, 0, 382}, - dictWord{9, 0, 649}, - dictWord{11, 0, 71}, - dictWord{11, 0, 155}, - dictWord{11, 0, 313}, - dictWord{12, 0, 5}, - dictWord{13, 0, 325}, - dictWord{142, 0, 287}, - dictWord{134, 0, 1818}, - dictWord{136, 0, 1007}, - dictWord{138, 0, 321}, - dictWord{7, 0, 360}, - dictWord{7, 0, 425}, - dictWord{9, 0, 66}, - dictWord{9, 0, 278}, - dictWord{138, 0, 644}, - dictWord{133, 10, 818}, - dictWord{5, 0, 385}, - dictWord{5, 10, 541}, - dictWord{6, 10, 94}, - dictWord{6, 10, 499}, - dictWord{ - 7, - 10, - 230, - }, - dictWord{139, 10, 321}, - dictWord{4, 10, 920}, - dictWord{5, 10, 25}, - dictWord{5, 10, 790}, - dictWord{6, 10, 457}, - dictWord{7, 10, 853}, - dictWord{ - 136, - 10, - 788, - }, - dictWord{4, 0, 900}, - dictWord{133, 0, 861}, - dictWord{5, 0, 254}, - dictWord{7, 0, 985}, - dictWord{136, 0, 73}, - dictWord{7, 0, 1959}, - dictWord{ - 136, - 0, - 683, - }, - dictWord{134, 10, 1765}, - dictWord{133, 10, 822}, - dictWord{132, 10, 634}, - dictWord{4, 11, 29}, - dictWord{6, 11, 532}, - dictWord{7, 11, 1628}, - dictWord{ - 7, - 11, - 1648, - }, - dictWord{9, 11, 303}, - dictWord{9, 11, 350}, - dictWord{10, 11, 433}, - dictWord{11, 11, 97}, - dictWord{11, 11, 557}, - dictWord{11, 11, 745}, - dictWord{12, 11, 289}, - dictWord{12, 11, 335}, - dictWord{12, 11, 348}, - dictWord{12, 11, 606}, - dictWord{13, 11, 116}, - dictWord{13, 11, 233}, - dictWord{ - 13, - 11, - 466, - }, - dictWord{14, 11, 181}, - dictWord{14, 11, 209}, - dictWord{14, 11, 232}, - dictWord{14, 11, 236}, - dictWord{14, 11, 300}, - dictWord{16, 11, 41}, - dictWord{ - 148, - 11, - 97, - }, - dictWord{19, 0, 86}, - dictWord{6, 10, 36}, - dictWord{7, 10, 658}, - dictWord{136, 10, 454}, - dictWord{135, 11, 1692}, - dictWord{132, 0, 725}, - dictWord{ - 5, - 11, - 501, - }, - dictWord{7, 11, 1704}, - dictWord{9, 11, 553}, - dictWord{11, 11, 520}, - dictWord{12, 11, 557}, - dictWord{141, 11, 249}, - dictWord{134, 0, 196}, - dictWord{133, 0, 831}, - dictWord{136, 0, 723}, - dictWord{7, 0, 1897}, - dictWord{13, 0, 80}, - dictWord{13, 0, 437}, - dictWord{145, 0, 74}, - dictWord{4, 0, 992}, - dictWord{ - 6, - 0, - 627, - }, - dictWord{136, 0, 994}, - dictWord{135, 11, 1294}, - dictWord{132, 10, 104}, - dictWord{5, 0, 848}, - dictWord{6, 0, 66}, - dictWord{136, 0, 764}, - dictWord{ - 4, - 0, - 36, - }, - dictWord{7, 0, 1387}, - dictWord{10, 0, 205}, - dictWord{139, 0, 755}, - dictWord{6, 0, 1046}, - dictWord{134, 0, 1485}, - dictWord{134, 0, 950}, - dictWord{132, 0, 887}, - dictWord{14, 0, 450}, - dictWord{148, 0, 111}, - dictWord{7, 0, 620}, - 
dictWord{7, 0, 831}, - dictWord{9, 10, 542}, - dictWord{9, 10, 566}, - dictWord{ - 138, - 10, - 728, - }, - dictWord{6, 0, 165}, - dictWord{138, 0, 388}, - dictWord{139, 10, 263}, - dictWord{4, 0, 719}, - dictWord{135, 0, 155}, - dictWord{138, 10, 468}, - dictWord{6, 11, 453}, - dictWord{144, 11, 36}, - dictWord{134, 11, 129}, - dictWord{5, 0, 533}, - dictWord{7, 0, 755}, - dictWord{138, 0, 780}, - dictWord{134, 0, 1465}, - dictWord{4, 0, 353}, - dictWord{6, 0, 146}, - dictWord{6, 0, 1789}, - dictWord{7, 0, 427}, - dictWord{7, 0, 990}, - dictWord{7, 0, 1348}, - dictWord{9, 0, 665}, - dictWord{9, 0, 898}, - dictWord{11, 0, 893}, - dictWord{142, 0, 212}, - dictWord{7, 10, 87}, - dictWord{142, 10, 288}, - dictWord{4, 0, 45}, - dictWord{135, 0, 1257}, - dictWord{12, 0, 7}, - dictWord{7, 10, 988}, - dictWord{7, 10, 1939}, - dictWord{9, 10, 64}, - dictWord{9, 10, 502}, - dictWord{12, 10, 34}, - dictWord{13, 10, 12}, - dictWord{13, 10, 234}, - dictWord{147, 10, 77}, - dictWord{4, 0, 607}, - dictWord{5, 11, 60}, - dictWord{6, 11, 504}, - dictWord{7, 11, 614}, - dictWord{7, 11, 1155}, - dictWord{140, 11, 0}, - dictWord{ - 135, - 10, - 141, - }, - dictWord{8, 11, 198}, - dictWord{11, 11, 29}, - dictWord{140, 11, 534}, - dictWord{140, 0, 65}, - dictWord{136, 0, 816}, - dictWord{132, 10, 619}, - dictWord{139, 0, 88}, - dictWord{5, 10, 246}, - dictWord{8, 10, 189}, - dictWord{9, 10, 355}, - dictWord{9, 10, 512}, - dictWord{10, 10, 124}, - dictWord{10, 10, 453}, - dictWord{11, 10, 143}, - dictWord{11, 10, 416}, - dictWord{11, 10, 859}, - dictWord{141, 10, 341}, - dictWord{4, 11, 379}, - dictWord{135, 11, 1397}, - dictWord{ - 4, - 0, - 600, - }, - dictWord{137, 0, 621}, - dictWord{133, 0, 367}, - dictWord{134, 0, 561}, - dictWord{6, 0, 559}, - dictWord{134, 0, 1691}, - dictWord{6, 0, 585}, - dictWord{ - 134, - 11, - 585, - }, - dictWord{135, 11, 1228}, - dictWord{4, 11, 118}, - dictWord{5, 10, 678}, - dictWord{6, 11, 274}, - dictWord{6, 11, 361}, - dictWord{7, 11, 75}, - dictWord{ - 141, - 11, - 441, - }, - dictWord{135, 11, 1818}, - dictWord{137, 11, 841}, - dictWord{5, 0, 573}, - dictWord{6, 0, 287}, - dictWord{7, 10, 862}, - dictWord{7, 10, 1886}, - dictWord{138, 10, 179}, - dictWord{132, 10, 517}, - dictWord{140, 11, 693}, - dictWord{5, 11, 314}, - dictWord{6, 11, 221}, - dictWord{7, 11, 419}, - dictWord{ - 10, - 11, - 650, - }, - dictWord{11, 11, 396}, - dictWord{12, 11, 156}, - dictWord{13, 11, 369}, - dictWord{14, 11, 333}, - dictWord{145, 11, 47}, - dictWord{140, 10, 540}, - dictWord{136, 10, 667}, - dictWord{11, 10, 403}, - dictWord{146, 10, 83}, - dictWord{6, 0, 672}, - dictWord{133, 10, 761}, - dictWord{9, 0, 157}, - dictWord{10, 10, 131}, - dictWord{140, 10, 72}, - dictWord{7, 0, 714}, - dictWord{134, 11, 460}, - dictWord{134, 0, 456}, - dictWord{133, 0, 925}, - dictWord{5, 11, 682}, - dictWord{ - 135, - 11, - 1887, - }, - dictWord{136, 11, 510}, - dictWord{136, 11, 475}, - dictWord{133, 11, 1016}, - dictWord{9, 0, 19}, - dictWord{7, 11, 602}, - dictWord{8, 11, 179}, - dictWord{ - 10, - 11, - 781, - }, - dictWord{140, 11, 126}, - dictWord{6, 11, 329}, - dictWord{138, 11, 111}, - dictWord{6, 0, 822}, - dictWord{134, 0, 1473}, - dictWord{144, 11, 86}, - dictWord{11, 0, 113}, - dictWord{139, 11, 113}, - dictWord{5, 11, 821}, - dictWord{134, 11, 1687}, - dictWord{133, 10, 449}, - dictWord{7, 0, 463}, - dictWord{ - 17, - 0, - 69, - }, - dictWord{136, 10, 103}, - dictWord{7, 10, 2028}, - dictWord{138, 10, 641}, - dictWord{6, 0, 193}, - dictWord{7, 0, 240}, - dictWord{7, 0, 1682}, - dictWord{ - 10, 
- 0, - 51, - }, - dictWord{10, 0, 640}, - dictWord{11, 0, 410}, - dictWord{13, 0, 82}, - dictWord{14, 0, 247}, - dictWord{14, 0, 331}, - dictWord{142, 0, 377}, - dictWord{6, 0, 471}, - dictWord{11, 0, 411}, - dictWord{142, 0, 2}, - dictWord{5, 11, 71}, - dictWord{7, 11, 1407}, - dictWord{9, 11, 388}, - dictWord{9, 11, 704}, - dictWord{10, 11, 261}, - dictWord{ - 10, - 11, - 619, - }, - dictWord{11, 11, 547}, - dictWord{11, 11, 619}, - dictWord{143, 11, 157}, - dictWord{136, 0, 633}, - dictWord{135, 0, 1148}, - dictWord{6, 0, 554}, - dictWord{7, 0, 1392}, - dictWord{12, 0, 129}, - dictWord{7, 10, 1274}, - dictWord{7, 10, 1386}, - dictWord{7, 11, 2008}, - dictWord{9, 11, 337}, - dictWord{10, 11, 517}, - dictWord{146, 10, 87}, - dictWord{7, 0, 803}, - dictWord{8, 0, 542}, - dictWord{6, 10, 187}, - dictWord{7, 10, 1203}, - dictWord{8, 10, 380}, - dictWord{14, 10, 117}, - dictWord{149, 10, 28}, - dictWord{6, 10, 297}, - dictWord{7, 10, 793}, - dictWord{139, 10, 938}, - dictWord{8, 0, 438}, - dictWord{11, 0, 363}, - dictWord{7, 10, 464}, - dictWord{11, 10, 105}, - dictWord{12, 10, 231}, - dictWord{14, 10, 386}, - dictWord{15, 10, 102}, - dictWord{148, 10, 75}, - dictWord{5, 11, 16}, - dictWord{6, 11, 86}, - dictWord{6, 11, 603}, - dictWord{7, 11, 292}, - dictWord{7, 11, 561}, - dictWord{8, 11, 257}, - dictWord{8, 11, 382}, - dictWord{9, 11, 721}, - dictWord{9, 11, 778}, - dictWord{ - 11, - 11, - 581, - }, - dictWord{140, 11, 466}, - dictWord{6, 0, 717}, - dictWord{4, 11, 486}, - dictWord{133, 11, 491}, - dictWord{132, 0, 875}, - dictWord{132, 11, 72}, - dictWord{6, 11, 265}, - dictWord{135, 11, 847}, - dictWord{4, 0, 237}, - dictWord{135, 0, 514}, - dictWord{6, 0, 392}, - dictWord{7, 0, 65}, - dictWord{135, 0, 2019}, - dictWord{140, 11, 261}, - dictWord{135, 11, 922}, - dictWord{137, 11, 404}, - dictWord{12, 0, 563}, - dictWord{14, 0, 101}, - dictWord{18, 0, 129}, - dictWord{ - 7, - 10, - 1010, - }, - dictWord{11, 10, 733}, - dictWord{11, 10, 759}, - dictWord{13, 10, 34}, - dictWord{146, 10, 45}, - dictWord{7, 10, 1656}, - dictWord{9, 10, 369}, - dictWord{ - 10, - 10, - 338, - }, - dictWord{10, 10, 490}, - dictWord{11, 10, 154}, - dictWord{11, 10, 545}, - dictWord{11, 10, 775}, - dictWord{13, 10, 77}, - dictWord{141, 10, 274}, - dictWord{4, 0, 444}, - dictWord{10, 0, 146}, - dictWord{140, 0, 9}, - dictWord{139, 11, 163}, - dictWord{7, 0, 1260}, - dictWord{135, 0, 1790}, - dictWord{9, 0, 222}, - dictWord{10, 0, 43}, - dictWord{139, 0, 900}, - dictWord{137, 11, 234}, - dictWord{138, 0, 971}, - dictWord{137, 0, 761}, - dictWord{134, 0, 699}, - dictWord{ - 136, - 11, - 434, - }, - dictWord{6, 0, 1116}, - dictWord{7, 0, 1366}, - dictWord{5, 10, 20}, - dictWord{6, 11, 197}, - dictWord{6, 10, 298}, - dictWord{7, 10, 659}, - dictWord{8, 11, 205}, - dictWord{137, 10, 219}, - dictWord{132, 11, 490}, - dictWord{11, 11, 820}, - dictWord{150, 11, 51}, - dictWord{7, 10, 1440}, - dictWord{11, 10, 854}, - dictWord{ - 11, - 10, - 872, - }, - dictWord{11, 10, 921}, - dictWord{12, 10, 551}, - dictWord{13, 10, 472}, - dictWord{142, 10, 367}, - dictWord{140, 11, 13}, - dictWord{132, 0, 829}, - dictWord{12, 0, 242}, - dictWord{132, 10, 439}, - dictWord{136, 10, 669}, - dictWord{6, 0, 593}, - dictWord{6, 11, 452}, - dictWord{7, 11, 312}, - dictWord{ - 138, - 11, - 219, - }, - dictWord{4, 11, 333}, - dictWord{9, 11, 176}, - dictWord{12, 11, 353}, - dictWord{141, 11, 187}, - dictWord{7, 0, 36}, - dictWord{8, 0, 201}, - dictWord{ - 136, - 0, - 605, - }, - dictWord{140, 0, 224}, - dictWord{132, 10, 233}, - dictWord{134, 
0, 1430}, - dictWord{134, 0, 1806}, - dictWord{4, 0, 523}, - dictWord{133, 0, 638}, - dictWord{ - 6, - 0, - 1889, - }, - dictWord{9, 0, 958}, - dictWord{9, 0, 971}, - dictWord{9, 0, 976}, - dictWord{12, 0, 796}, - dictWord{12, 0, 799}, - dictWord{12, 0, 808}, - dictWord{ - 12, - 0, - 835, - }, - dictWord{12, 0, 836}, - dictWord{12, 0, 914}, - dictWord{12, 0, 946}, - dictWord{15, 0, 216}, - dictWord{15, 0, 232}, - dictWord{18, 0, 183}, - dictWord{18, 0, 187}, - dictWord{18, 0, 194}, - dictWord{18, 0, 212}, - dictWord{18, 0, 232}, - dictWord{149, 0, 49}, - dictWord{132, 10, 482}, - dictWord{6, 0, 827}, - dictWord{134, 0, 1434}, - dictWord{135, 10, 346}, - dictWord{134, 0, 2043}, - dictWord{6, 0, 242}, - dictWord{7, 0, 227}, - dictWord{7, 0, 1581}, - dictWord{8, 0, 104}, - dictWord{9, 0, 113}, - dictWord{9, 0, 220}, - dictWord{9, 0, 427}, - dictWord{10, 0, 136}, - dictWord{10, 0, 239}, - dictWord{11, 0, 579}, - dictWord{11, 0, 1023}, - dictWord{13, 0, 4}, - dictWord{ - 13, - 0, - 204, - }, - dictWord{13, 0, 316}, - dictWord{148, 0, 86}, - dictWord{134, 11, 1685}, - dictWord{7, 0, 148}, - dictWord{8, 0, 284}, - dictWord{141, 0, 63}, - dictWord{ - 142, - 0, - 10, - }, - dictWord{135, 11, 584}, - dictWord{134, 0, 1249}, - dictWord{7, 0, 861}, - dictWord{135, 10, 334}, - dictWord{5, 10, 795}, - dictWord{6, 10, 1741}, - dictWord{ - 137, - 11, - 70, - }, - dictWord{132, 0, 807}, - dictWord{7, 11, 135}, - dictWord{8, 11, 7}, - dictWord{8, 11, 62}, - dictWord{9, 11, 243}, - dictWord{10, 11, 658}, - dictWord{ - 10, - 11, - 697, - }, - dictWord{11, 11, 456}, - dictWord{139, 11, 756}, - dictWord{9, 11, 395}, - dictWord{138, 11, 79}, - dictWord{137, 11, 108}, - dictWord{147, 0, 94}, - dictWord{136, 0, 494}, - dictWord{135, 11, 631}, - dictWord{135, 10, 622}, - dictWord{7, 0, 1510}, - dictWord{135, 10, 1750}, - dictWord{4, 10, 203}, - dictWord{ - 135, - 10, - 1936, - }, - dictWord{7, 11, 406}, - dictWord{7, 11, 459}, - dictWord{8, 11, 606}, - dictWord{139, 11, 726}, - dictWord{7, 0, 1306}, - dictWord{8, 0, 505}, - dictWord{ - 9, - 0, - 482, - }, - dictWord{10, 0, 126}, - dictWord{11, 0, 225}, - dictWord{12, 0, 347}, - dictWord{12, 0, 449}, - dictWord{13, 0, 19}, - dictWord{14, 0, 218}, - dictWord{142, 0, 435}, - dictWord{5, 0, 268}, - dictWord{10, 0, 764}, - dictWord{12, 0, 120}, - dictWord{13, 0, 39}, - dictWord{145, 0, 127}, - dictWord{142, 11, 68}, - dictWord{11, 10, 678}, - dictWord{140, 10, 307}, - dictWord{12, 11, 268}, - dictWord{12, 11, 640}, - dictWord{142, 11, 119}, - dictWord{135, 10, 2044}, - dictWord{133, 11, 612}, - dictWord{ - 4, - 11, - 372, - }, - dictWord{7, 11, 482}, - dictWord{8, 11, 158}, - dictWord{9, 11, 602}, - dictWord{9, 11, 615}, - dictWord{10, 11, 245}, - dictWord{10, 11, 678}, - dictWord{ - 10, - 11, - 744, - }, - dictWord{11, 11, 248}, - dictWord{139, 11, 806}, - dictWord{7, 10, 311}, - dictWord{9, 10, 308}, - dictWord{140, 10, 255}, - dictWord{4, 0, 384}, - dictWord{135, 0, 1022}, - dictWord{5, 11, 854}, - dictWord{135, 11, 1991}, - dictWord{135, 10, 1266}, - dictWord{4, 10, 400}, - dictWord{5, 10, 267}, - dictWord{ - 135, - 10, - 232, - }, - dictWord{135, 0, 1703}, - dictWord{9, 0, 159}, - dictWord{11, 0, 661}, - dictWord{140, 0, 603}, - dictWord{4, 0, 964}, - dictWord{14, 0, 438}, - dictWord{ - 14, - 0, - 444, - }, - dictWord{14, 0, 456}, - dictWord{22, 0, 60}, - dictWord{22, 0, 63}, - dictWord{9, 11, 106}, - dictWord{9, 11, 163}, - dictWord{9, 11, 296}, - dictWord{10, 11, 167}, - dictWord{10, 11, 172}, - dictWord{10, 11, 777}, - dictWord{139, 11, 16}, - dictWord{136, 0, 
583}, - dictWord{132, 0, 515}, - dictWord{8, 0, 632}, - dictWord{8, 0, 697}, - dictWord{137, 0, 854}, - dictWord{5, 11, 195}, - dictWord{135, 11, 1685}, - dictWord{6, 0, 1123}, - dictWord{134, 0, 1365}, - dictWord{134, 11, 328}, - dictWord{ - 7, - 11, - 1997, - }, - dictWord{8, 11, 730}, - dictWord{139, 11, 1006}, - dictWord{4, 0, 136}, - dictWord{133, 0, 551}, - dictWord{134, 0, 1782}, - dictWord{7, 0, 1287}, - dictWord{ - 9, - 0, - 44, - }, - dictWord{10, 0, 552}, - dictWord{10, 0, 642}, - dictWord{11, 0, 839}, - dictWord{12, 0, 274}, - dictWord{12, 0, 275}, - dictWord{12, 0, 372}, - dictWord{ - 13, - 0, - 91, - }, - dictWord{142, 0, 125}, - dictWord{5, 11, 751}, - dictWord{11, 11, 797}, - dictWord{140, 11, 203}, - dictWord{133, 0, 732}, - dictWord{7, 0, 679}, - dictWord{ - 8, - 0, - 313, - }, - dictWord{4, 10, 100}, - dictWord{135, 11, 821}, - dictWord{10, 0, 361}, - dictWord{142, 0, 316}, - dictWord{134, 0, 595}, - dictWord{6, 0, 147}, - dictWord{ - 7, - 0, - 886, - }, - dictWord{9, 0, 753}, - dictWord{138, 0, 268}, - dictWord{5, 10, 362}, - dictWord{5, 10, 443}, - dictWord{6, 10, 318}, - dictWord{7, 10, 1019}, - dictWord{ - 139, - 10, - 623, - }, - dictWord{5, 10, 463}, - dictWord{136, 10, 296}, - dictWord{4, 10, 454}, - dictWord{5, 11, 950}, - dictWord{5, 11, 994}, - dictWord{134, 11, 351}, - dictWord{ - 138, - 0, - 137, - }, - dictWord{5, 10, 48}, - dictWord{5, 10, 404}, - dictWord{6, 10, 557}, - dictWord{7, 10, 458}, - dictWord{8, 10, 597}, - dictWord{10, 10, 455}, - dictWord{ - 10, - 10, - 606, - }, - dictWord{11, 10, 49}, - dictWord{11, 10, 548}, - dictWord{12, 10, 476}, - dictWord{13, 10, 18}, - dictWord{141, 10, 450}, - dictWord{133, 0, 414}, - dictWord{ - 135, - 0, - 1762, - }, - dictWord{5, 11, 421}, - dictWord{135, 11, 47}, - dictWord{5, 10, 442}, - dictWord{135, 10, 1984}, - dictWord{134, 0, 599}, - dictWord{134, 0, 1749}, - dictWord{134, 0, 1627}, - dictWord{4, 0, 488}, - dictWord{132, 11, 350}, - dictWord{137, 11, 751}, - dictWord{132, 0, 83}, - dictWord{140, 0, 676}, - dictWord{ - 133, - 11, - 967, - }, - dictWord{7, 0, 1639}, - dictWord{5, 10, 55}, - dictWord{140, 10, 161}, - dictWord{4, 11, 473}, - dictWord{7, 11, 623}, - dictWord{8, 11, 808}, - dictWord{ - 9, - 11, - 871, - }, - dictWord{9, 11, 893}, - dictWord{11, 11, 38}, - dictWord{11, 11, 431}, - dictWord{12, 11, 112}, - dictWord{12, 11, 217}, - dictWord{12, 11, 243}, - dictWord{ - 12, - 11, - 562, - }, - dictWord{12, 11, 683}, - dictWord{13, 11, 141}, - dictWord{13, 11, 197}, - dictWord{13, 11, 227}, - dictWord{13, 11, 406}, - dictWord{13, 11, 487}, - dictWord{14, 11, 156}, - dictWord{14, 11, 203}, - dictWord{14, 11, 224}, - dictWord{14, 11, 256}, - dictWord{18, 11, 58}, - dictWord{150, 11, 0}, - dictWord{ - 133, - 10, - 450, - }, - dictWord{7, 11, 736}, - dictWord{139, 11, 264}, - dictWord{134, 0, 278}, - dictWord{4, 11, 222}, - dictWord{7, 11, 286}, - dictWord{136, 11, 629}, - dictWord{ - 135, - 10, - 869, - }, - dictWord{140, 0, 97}, - dictWord{144, 0, 14}, - dictWord{134, 0, 1085}, - dictWord{4, 10, 213}, - dictWord{7, 10, 223}, - dictWord{136, 10, 80}, - dictWord{ - 7, - 0, - 388, - }, - dictWord{7, 0, 644}, - dictWord{139, 0, 781}, - dictWord{132, 0, 849}, - dictWord{7, 0, 229}, - dictWord{8, 0, 59}, - dictWord{9, 0, 190}, - dictWord{10, 0, 378}, - dictWord{140, 0, 191}, - dictWord{7, 10, 381}, - dictWord{7, 10, 806}, - dictWord{7, 10, 820}, - dictWord{8, 10, 354}, - dictWord{8, 10, 437}, - dictWord{8, 10, 787}, - dictWord{9, 10, 657}, - dictWord{10, 10, 58}, - dictWord{10, 10, 339}, - dictWord{10, 10, 
749}, - dictWord{11, 10, 914}, - dictWord{12, 10, 162}, - dictWord{13, 10, 75}, - dictWord{14, 10, 106}, - dictWord{14, 10, 198}, - dictWord{14, 10, 320}, - dictWord{14, 10, 413}, - dictWord{146, 10, 43}, - dictWord{141, 11, 306}, - dictWord{ - 136, - 10, - 747, - }, - dictWord{134, 0, 1115}, - dictWord{16, 0, 94}, - dictWord{16, 0, 108}, - dictWord{136, 11, 146}, - dictWord{6, 0, 700}, - dictWord{6, 0, 817}, - dictWord{ - 134, - 0, - 1002, - }, - dictWord{133, 10, 692}, - dictWord{4, 11, 465}, - dictWord{135, 11, 1663}, - dictWord{134, 10, 191}, - dictWord{6, 0, 1414}, - dictWord{ - 135, - 11, - 913, - }, - dictWord{132, 0, 660}, - dictWord{7, 0, 1035}, - dictWord{138, 0, 737}, - dictWord{6, 10, 162}, - dictWord{7, 10, 1960}, - dictWord{136, 10, 831}, - dictWord{ - 132, - 10, - 706, - }, - dictWord{7, 0, 690}, - dictWord{9, 0, 217}, - dictWord{9, 0, 587}, - dictWord{140, 0, 521}, - dictWord{138, 10, 426}, - dictWord{135, 10, 1235}, - dictWord{ - 6, - 11, - 82, - }, - dictWord{7, 11, 138}, - dictWord{7, 11, 517}, - dictWord{9, 11, 673}, - dictWord{139, 11, 238}, - dictWord{138, 0, 272}, - dictWord{5, 11, 495}, - dictWord{ - 7, - 11, - 834, - }, - dictWord{9, 11, 733}, - dictWord{139, 11, 378}, - dictWord{134, 0, 1744}, - dictWord{132, 0, 1011}, - dictWord{7, 11, 828}, - dictWord{142, 11, 116}, - dictWord{4, 0, 733}, - dictWord{9, 0, 194}, - dictWord{10, 0, 92}, - dictWord{11, 0, 198}, - dictWord{12, 0, 84}, - dictWord{13, 0, 128}, - dictWord{133, 11, 559}, - dictWord{ - 10, - 0, - 57, - }, - dictWord{10, 0, 277}, - dictWord{6, 11, 21}, - dictWord{6, 11, 1737}, - dictWord{7, 11, 1444}, - dictWord{136, 11, 224}, - dictWord{4, 10, 204}, - dictWord{ - 137, - 10, - 902, - }, - dictWord{136, 10, 833}, - dictWord{11, 0, 348}, - dictWord{12, 0, 99}, - dictWord{18, 0, 1}, - dictWord{18, 0, 11}, - dictWord{19, 0, 4}, - dictWord{7, 10, 366}, - dictWord{9, 10, 287}, - dictWord{12, 10, 199}, - dictWord{12, 10, 556}, - dictWord{140, 10, 577}, - dictWord{6, 0, 1981}, - dictWord{136, 0, 936}, - dictWord{ - 21, - 0, - 33, - }, - dictWord{150, 0, 40}, - dictWord{5, 11, 519}, - dictWord{138, 11, 204}, - dictWord{5, 10, 356}, - dictWord{135, 10, 224}, - dictWord{134, 0, 775}, - dictWord{ - 135, - 0, - 306, - }, - dictWord{7, 10, 630}, - dictWord{9, 10, 567}, - dictWord{11, 10, 150}, - dictWord{11, 10, 444}, - dictWord{141, 10, 119}, - dictWord{5, 0, 979}, - dictWord{ - 134, - 10, - 539, - }, - dictWord{133, 0, 611}, - dictWord{4, 11, 402}, - dictWord{135, 11, 1679}, - dictWord{5, 0, 178}, - dictWord{7, 11, 2}, - dictWord{8, 11, 323}, - dictWord{ - 136, - 11, - 479, - }, - dictWord{5, 11, 59}, - dictWord{135, 11, 672}, - dictWord{4, 0, 1010}, - dictWord{6, 0, 1969}, - dictWord{138, 11, 237}, - dictWord{133, 11, 412}, - dictWord{146, 11, 34}, - dictWord{7, 11, 1740}, - dictWord{146, 11, 48}, - dictWord{134, 0, 664}, - dictWord{139, 10, 814}, - dictWord{4, 11, 85}, - dictWord{ - 135, - 11, - 549, - }, - dictWord{133, 11, 94}, - dictWord{133, 11, 457}, - dictWord{132, 0, 390}, - dictWord{134, 0, 1510}, - dictWord{4, 10, 235}, - dictWord{135, 10, 255}, - dictWord{4, 10, 194}, - dictWord{5, 10, 584}, - dictWord{6, 11, 11}, - dictWord{6, 10, 384}, - dictWord{7, 11, 187}, - dictWord{7, 10, 583}, - dictWord{10, 10, 761}, - dictWord{ - 11, - 10, - 760, - }, - dictWord{139, 10, 851}, - dictWord{4, 11, 522}, - dictWord{139, 11, 802}, - dictWord{135, 0, 493}, - dictWord{10, 11, 776}, - dictWord{13, 11, 345}, - dictWord{142, 11, 425}, - dictWord{146, 0, 37}, - dictWord{4, 11, 52}, - dictWord{135, 11, 661}, - dictWord{134, 
0, 724}, - dictWord{134, 0, 829}, - dictWord{ - 133, - 11, - 520, - }, - dictWord{133, 10, 562}, - dictWord{4, 11, 281}, - dictWord{5, 11, 38}, - dictWord{7, 11, 194}, - dictWord{7, 11, 668}, - dictWord{7, 11, 1893}, - dictWord{ - 137, - 11, - 397, - }, - dictWord{5, 10, 191}, - dictWord{137, 10, 271}, - dictWord{7, 0, 1537}, - dictWord{14, 0, 96}, - dictWord{143, 0, 73}, - dictWord{5, 0, 473}, - dictWord{ - 11, - 0, - 168, - }, - dictWord{4, 10, 470}, - dictWord{6, 10, 153}, - dictWord{7, 10, 1503}, - dictWord{7, 10, 1923}, - dictWord{10, 10, 701}, - dictWord{11, 10, 132}, - dictWord{ - 11, - 10, - 227, - }, - dictWord{11, 10, 320}, - dictWord{11, 10, 436}, - dictWord{11, 10, 525}, - dictWord{11, 10, 855}, - dictWord{12, 10, 41}, - dictWord{12, 10, 286}, - dictWord{13, 10, 103}, - dictWord{13, 10, 284}, - dictWord{14, 10, 255}, - dictWord{14, 10, 262}, - dictWord{15, 10, 117}, - dictWord{143, 10, 127}, - dictWord{ - 133, - 0, - 105, - }, - dictWord{5, 0, 438}, - dictWord{9, 0, 694}, - dictWord{12, 0, 627}, - dictWord{141, 0, 210}, - dictWord{133, 10, 327}, - dictWord{6, 10, 552}, - dictWord{ - 7, - 10, - 1754, - }, - dictWord{137, 10, 604}, - dictWord{134, 0, 1256}, - dictWord{152, 0, 11}, - dictWord{5, 11, 448}, - dictWord{11, 11, 98}, - dictWord{139, 11, 524}, - dictWord{ - 7, - 0, - 1626, - }, - dictWord{5, 10, 80}, - dictWord{6, 10, 405}, - dictWord{7, 10, 403}, - dictWord{7, 10, 1502}, - dictWord{8, 10, 456}, - dictWord{9, 10, 487}, - dictWord{ - 9, - 10, - 853, - }, - dictWord{9, 10, 889}, - dictWord{10, 10, 309}, - dictWord{11, 10, 721}, - dictWord{11, 10, 994}, - dictWord{12, 10, 430}, - dictWord{13, 10, 165}, - dictWord{ - 14, - 11, - 16, - }, - dictWord{146, 11, 44}, - dictWord{132, 0, 779}, - dictWord{8, 0, 25}, - dictWord{138, 0, 826}, - dictWord{4, 10, 453}, - dictWord{5, 10, 887}, - dictWord{ - 6, - 10, - 535, - }, - dictWord{8, 10, 6}, - dictWord{8, 10, 543}, - dictWord{136, 10, 826}, - dictWord{137, 11, 461}, - dictWord{140, 11, 632}, - dictWord{132, 0, 308}, - dictWord{135, 0, 741}, - dictWord{132, 0, 671}, - dictWord{7, 0, 150}, - dictWord{8, 0, 649}, - dictWord{136, 0, 1020}, - dictWord{9, 0, 99}, - dictWord{6, 11, 336}, - dictWord{ - 8, - 11, - 552, - }, - dictWord{9, 11, 285}, - dictWord{10, 11, 99}, - dictWord{139, 11, 568}, - dictWord{134, 0, 521}, - dictWord{5, 0, 339}, - dictWord{14, 0, 3}, - dictWord{ - 15, - 0, - 41, - }, - dictWord{15, 0, 166}, - dictWord{147, 0, 66}, - dictWord{6, 11, 423}, - dictWord{7, 11, 665}, - dictWord{7, 11, 1210}, - dictWord{9, 11, 218}, - dictWord{ - 141, - 11, - 222, - }, - dictWord{6, 0, 543}, - dictWord{5, 10, 101}, - dictWord{5, 11, 256}, - dictWord{6, 10, 88}, - dictWord{7, 10, 1677}, - dictWord{9, 10, 100}, - dictWord{10, 10, 677}, - dictWord{14, 10, 169}, - dictWord{14, 10, 302}, - dictWord{14, 10, 313}, - dictWord{15, 10, 48}, - dictWord{143, 10, 84}, - dictWord{4, 10, 310}, - dictWord{ - 7, - 10, - 708, - }, - dictWord{7, 10, 996}, - dictWord{9, 10, 795}, - dictWord{10, 10, 390}, - dictWord{10, 10, 733}, - dictWord{11, 10, 451}, - dictWord{12, 10, 249}, - dictWord{ - 14, - 10, - 115, - }, - dictWord{14, 10, 286}, - dictWord{143, 10, 100}, - dictWord{133, 10, 587}, - dictWord{13, 11, 417}, - dictWord{14, 11, 129}, - dictWord{143, 11, 15}, - dictWord{134, 0, 1358}, - dictWord{136, 11, 554}, - dictWord{132, 10, 498}, - dictWord{7, 10, 217}, - dictWord{8, 10, 140}, - dictWord{138, 10, 610}, - dictWord{ - 135, - 11, - 989, - }, - dictWord{135, 11, 634}, - dictWord{6, 0, 155}, - dictWord{140, 0, 234}, - dictWord{135, 11, 462}, - 
dictWord{132, 11, 618}, - dictWord{ - 134, - 0, - 1628, - }, - dictWord{132, 0, 766}, - dictWord{4, 11, 339}, - dictWord{5, 10, 905}, - dictWord{135, 11, 259}, - dictWord{135, 0, 829}, - dictWord{4, 11, 759}, - dictWord{ - 141, - 11, - 169, - }, - dictWord{7, 0, 1445}, - dictWord{4, 10, 456}, - dictWord{7, 10, 358}, - dictWord{7, 10, 1637}, - dictWord{8, 10, 643}, - dictWord{139, 10, 483}, - dictWord{ - 5, - 0, - 486, - }, - dictWord{135, 0, 1349}, - dictWord{5, 11, 688}, - dictWord{135, 11, 712}, - dictWord{7, 0, 1635}, - dictWord{8, 0, 17}, - dictWord{10, 0, 217}, - dictWord{ - 10, - 0, - 295, - }, - dictWord{12, 0, 2}, - dictWord{140, 11, 2}, - dictWord{138, 0, 558}, - dictWord{150, 10, 56}, - dictWord{4, 11, 278}, - dictWord{5, 11, 465}, - dictWord{ - 135, - 11, - 1367, - }, - dictWord{136, 11, 482}, - dictWord{133, 10, 535}, - dictWord{6, 0, 1362}, - dictWord{6, 0, 1461}, - dictWord{10, 11, 274}, - dictWord{10, 11, 625}, - dictWord{139, 11, 530}, - dictWord{5, 0, 599}, - dictWord{5, 11, 336}, - dictWord{6, 11, 341}, - dictWord{6, 11, 478}, - dictWord{6, 11, 1763}, - dictWord{136, 11, 386}, - dictWord{7, 10, 1748}, - dictWord{137, 11, 151}, - dictWord{134, 0, 1376}, - dictWord{133, 10, 539}, - dictWord{135, 11, 73}, - dictWord{135, 11, 1971}, - dictWord{139, 11, 283}, - dictWord{9, 0, 93}, - dictWord{139, 0, 474}, - dictWord{6, 10, 91}, - dictWord{135, 10, 435}, - dictWord{6, 0, 447}, - dictWord{5, 11, 396}, - dictWord{134, 11, 501}, - dictWord{4, 10, 16}, - dictWord{5, 10, 316}, - dictWord{5, 10, 842}, - dictWord{6, 10, 370}, - dictWord{6, 10, 1778}, - dictWord{8, 10, 166}, - dictWord{11, 10, 812}, - dictWord{12, 10, 206}, - dictWord{12, 10, 351}, - dictWord{14, 10, 418}, - dictWord{16, 10, 15}, - dictWord{16, 10, 34}, - dictWord{18, 10, 3}, - dictWord{19, 10, 3}, - dictWord{19, 10, 7}, - dictWord{20, 10, 4}, - dictWord{149, 10, 21}, - dictWord{7, 0, 577}, - dictWord{7, 0, 1432}, - dictWord{9, 0, 475}, - dictWord{9, 0, 505}, - dictWord{9, 0, 526}, - dictWord{9, 0, 609}, - dictWord{9, 0, 689}, - dictWord{9, 0, 726}, - dictWord{9, 0, 735}, - dictWord{9, 0, 738}, - dictWord{10, 0, 556}, - dictWord{ - 10, - 0, - 674, - }, - dictWord{10, 0, 684}, - dictWord{11, 0, 89}, - dictWord{11, 0, 202}, - dictWord{11, 0, 272}, - dictWord{11, 0, 380}, - dictWord{11, 0, 415}, - dictWord{11, 0, 505}, - dictWord{11, 0, 537}, - dictWord{11, 0, 550}, - dictWord{11, 0, 562}, - dictWord{11, 0, 640}, - dictWord{11, 0, 667}, - dictWord{11, 0, 688}, - dictWord{11, 0, 847}, - dictWord{11, 0, 927}, - dictWord{11, 0, 930}, - dictWord{11, 0, 940}, - dictWord{12, 0, 144}, - dictWord{12, 0, 325}, - dictWord{12, 0, 329}, - dictWord{12, 0, 389}, - dictWord{ - 12, - 0, - 403, - }, - dictWord{12, 0, 451}, - dictWord{12, 0, 515}, - dictWord{12, 0, 604}, - dictWord{12, 0, 616}, - dictWord{12, 0, 626}, - dictWord{13, 0, 66}, - dictWord{ - 13, - 0, - 131, - }, - dictWord{13, 0, 167}, - dictWord{13, 0, 236}, - dictWord{13, 0, 368}, - dictWord{13, 0, 411}, - dictWord{13, 0, 434}, - dictWord{13, 0, 453}, - dictWord{13, 0, 461}, - dictWord{13, 0, 474}, - dictWord{14, 0, 59}, - dictWord{14, 0, 60}, - dictWord{14, 0, 139}, - dictWord{14, 0, 152}, - dictWord{14, 0, 276}, - dictWord{14, 0, 353}, - dictWord{ - 14, - 0, - 402, - }, - dictWord{15, 0, 28}, - dictWord{15, 0, 81}, - dictWord{15, 0, 123}, - dictWord{15, 0, 152}, - dictWord{18, 0, 136}, - dictWord{148, 0, 88}, - dictWord{ - 4, - 11, - 929, - }, - dictWord{133, 11, 799}, - dictWord{136, 11, 46}, - dictWord{142, 0, 307}, - dictWord{4, 0, 609}, - dictWord{7, 0, 756}, - 
dictWord{9, 0, 544}, - dictWord{ - 11, - 0, - 413, - }, - dictWord{144, 0, 25}, - dictWord{10, 0, 687}, - dictWord{7, 10, 619}, - dictWord{10, 10, 547}, - dictWord{11, 10, 122}, - dictWord{140, 10, 601}, - dictWord{ - 4, - 0, - 930, - }, - dictWord{133, 0, 947}, - dictWord{133, 0, 939}, - dictWord{142, 0, 21}, - dictWord{4, 11, 892}, - dictWord{133, 11, 770}, - dictWord{133, 0, 962}, - dictWord{ - 5, - 0, - 651, - }, - dictWord{8, 0, 170}, - dictWord{9, 0, 61}, - dictWord{9, 0, 63}, - dictWord{10, 0, 23}, - dictWord{10, 0, 37}, - dictWord{10, 0, 834}, - dictWord{11, 0, 4}, - dictWord{ - 11, - 0, - 187, - }, - dictWord{11, 0, 281}, - dictWord{11, 0, 503}, - dictWord{11, 0, 677}, - dictWord{12, 0, 96}, - dictWord{12, 0, 130}, - dictWord{12, 0, 244}, - dictWord{14, 0, 5}, - dictWord{14, 0, 40}, - dictWord{14, 0, 162}, - dictWord{14, 0, 202}, - dictWord{146, 0, 133}, - dictWord{4, 0, 406}, - dictWord{5, 0, 579}, - dictWord{12, 0, 492}, - dictWord{ - 150, - 0, - 15, - }, - dictWord{135, 11, 158}, - dictWord{135, 0, 597}, - dictWord{132, 0, 981}, - dictWord{132, 10, 888}, - dictWord{4, 10, 149}, - dictWord{138, 10, 368}, - dictWord{132, 0, 545}, - dictWord{4, 10, 154}, - dictWord{7, 10, 1134}, - dictWord{136, 10, 105}, - dictWord{135, 11, 2001}, - dictWord{134, 0, 1558}, - dictWord{ - 4, - 10, - 31, - }, - dictWord{6, 10, 429}, - dictWord{7, 10, 962}, - dictWord{9, 10, 458}, - dictWord{139, 10, 691}, - dictWord{132, 10, 312}, - dictWord{135, 10, 1642}, - dictWord{ - 6, - 0, - 17, - }, - dictWord{6, 0, 1304}, - dictWord{7, 0, 16}, - dictWord{7, 0, 1001}, - dictWord{9, 0, 886}, - dictWord{10, 0, 489}, - dictWord{10, 0, 800}, - dictWord{11, 0, 782}, - dictWord{12, 0, 320}, - dictWord{13, 0, 467}, - dictWord{14, 0, 145}, - dictWord{14, 0, 387}, - dictWord{143, 0, 119}, - dictWord{135, 0, 1982}, - dictWord{17, 0, 17}, - dictWord{7, 11, 1461}, - dictWord{140, 11, 91}, - dictWord{4, 10, 236}, - dictWord{132, 11, 602}, - dictWord{138, 0, 907}, - dictWord{136, 0, 110}, - dictWord{7, 0, 272}, - dictWord{19, 0, 53}, - dictWord{5, 10, 836}, - dictWord{5, 10, 857}, - dictWord{134, 10, 1680}, - dictWord{5, 0, 458}, - dictWord{7, 11, 1218}, - dictWord{136, 11, 303}, - dictWord{7, 0, 1983}, - dictWord{8, 0, 0}, - dictWord{8, 0, 171}, - dictWord{9, 0, 120}, - dictWord{9, 0, 732}, - dictWord{10, 0, 473}, - dictWord{11, 0, 656}, - dictWord{ - 11, - 0, - 998, - }, - dictWord{18, 0, 0}, - dictWord{18, 0, 2}, - dictWord{19, 0, 21}, - dictWord{10, 10, 68}, - dictWord{139, 10, 494}, - dictWord{137, 11, 662}, - dictWord{4, 11, 13}, - dictWord{5, 11, 567}, - dictWord{7, 11, 1498}, - dictWord{9, 11, 124}, - dictWord{11, 11, 521}, - dictWord{140, 11, 405}, - dictWord{4, 10, 81}, - dictWord{139, 10, 867}, - dictWord{135, 11, 1006}, - dictWord{7, 11, 800}, - dictWord{7, 11, 1783}, - dictWord{138, 11, 12}, - dictWord{9, 0, 295}, - dictWord{10, 0, 443}, - dictWord{ - 5, - 10, - 282, - }, - dictWord{8, 10, 650}, - dictWord{137, 10, 907}, - dictWord{132, 11, 735}, - dictWord{4, 11, 170}, - dictWord{4, 10, 775}, - dictWord{135, 11, 323}, - dictWord{ - 6, - 0, - 1844, - }, - dictWord{10, 0, 924}, - dictWord{11, 11, 844}, - dictWord{12, 11, 104}, - dictWord{140, 11, 625}, - dictWord{5, 11, 304}, - dictWord{7, 11, 1403}, - dictWord{140, 11, 498}, - dictWord{134, 0, 1232}, - dictWord{4, 0, 519}, - dictWord{10, 0, 70}, - dictWord{12, 0, 26}, - dictWord{14, 0, 17}, - dictWord{14, 0, 178}, - dictWord{ - 15, - 0, - 34, - }, - dictWord{149, 0, 12}, - dictWord{132, 0, 993}, - dictWord{4, 11, 148}, - dictWord{133, 11, 742}, - dictWord{6, 0, 
-	dictWord{7, 0, 491},
-	dictWord{7, 0, 530},
-	dictWord{8, 0, 592},
[... thousands of additional `-	dictWord{N, M, K},` deletion lines from this dictionary data table omitted for brevity ...]
dictWord{5, 0, 75}, - dictWord{9, 0, 517}, - dictWord{10, 0, 470}, - dictWord{12, 0, 155}, - dictWord{141, 0, 224}, - dictWord{132, 10, 462}, - dictWord{11, 11, 600}, - dictWord{11, 11, 670}, - dictWord{141, 11, 245}, - dictWord{142, 0, 83}, - dictWord{ - 5, - 10, - 73, - }, - dictWord{6, 10, 23}, - dictWord{134, 10, 338}, - dictWord{6, 0, 1031}, - dictWord{139, 11, 923}, - dictWord{7, 11, 164}, - dictWord{7, 11, 1571}, - dictWord{ - 9, - 11, - 107, - }, - dictWord{140, 11, 225}, - dictWord{134, 0, 1470}, - dictWord{133, 0, 954}, - dictWord{6, 0, 304}, - dictWord{8, 0, 418}, - dictWord{10, 0, 345}, - dictWord{ - 11, - 0, - 341, - }, - dictWord{139, 0, 675}, - dictWord{9, 0, 410}, - dictWord{139, 0, 425}, - dictWord{4, 11, 27}, - dictWord{5, 11, 484}, - dictWord{5, 11, 510}, - dictWord{6, 11, 434}, - dictWord{7, 11, 1000}, - dictWord{7, 11, 1098}, - dictWord{8, 11, 2}, - dictWord{136, 11, 200}, - dictWord{134, 0, 734}, - dictWord{140, 11, 257}, - dictWord{ - 7, - 10, - 725, - }, - dictWord{8, 10, 498}, - dictWord{139, 10, 268}, - dictWord{134, 0, 1822}, - dictWord{135, 0, 1798}, - dictWord{135, 10, 773}, - dictWord{132, 11, 460}, - dictWord{4, 11, 932}, - dictWord{133, 11, 891}, - dictWord{134, 0, 14}, - dictWord{132, 10, 583}, - dictWord{7, 10, 1462}, - dictWord{8, 11, 625}, - dictWord{ - 139, - 10, - 659, - }, - dictWord{5, 0, 113}, - dictWord{6, 0, 243}, - dictWord{6, 0, 1708}, - dictWord{7, 0, 1865}, - dictWord{11, 0, 161}, - dictWord{16, 0, 37}, - dictWord{17, 0, 99}, - dictWord{133, 10, 220}, - dictWord{134, 11, 76}, - dictWord{5, 11, 461}, - dictWord{135, 11, 1925}, - dictWord{140, 0, 69}, - dictWord{8, 11, 92}, - dictWord{ - 137, - 11, - 221, - }, - dictWord{139, 10, 803}, - dictWord{132, 10, 544}, - dictWord{4, 0, 274}, - dictWord{134, 0, 922}, - dictWord{132, 0, 541}, - dictWord{5, 0, 627}, - dictWord{ - 6, - 10, - 437, - }, - dictWord{6, 10, 564}, - dictWord{11, 10, 181}, - dictWord{141, 10, 183}, - dictWord{135, 10, 1192}, - dictWord{7, 0, 166}, - dictWord{132, 11, 763}, - dictWord{133, 11, 253}, - dictWord{134, 0, 849}, - dictWord{9, 11, 73}, - dictWord{10, 11, 110}, - dictWord{14, 11, 185}, - dictWord{145, 11, 119}, - dictWord{5, 11, 212}, - dictWord{12, 11, 35}, - dictWord{141, 11, 382}, - dictWord{133, 0, 717}, - dictWord{137, 0, 304}, - dictWord{136, 0, 600}, - dictWord{133, 0, 654}, - dictWord{ - 6, - 0, - 273, - }, - dictWord{10, 0, 188}, - dictWord{13, 0, 377}, - dictWord{146, 0, 77}, - dictWord{4, 10, 790}, - dictWord{5, 10, 273}, - dictWord{134, 10, 394}, - dictWord{ - 132, - 0, - 543, - }, - dictWord{135, 0, 410}, - dictWord{11, 0, 98}, - dictWord{11, 0, 524}, - dictWord{141, 0, 87}, - dictWord{132, 0, 941}, - dictWord{135, 11, 1175}, - dictWord{ - 4, - 0, - 250, - }, - dictWord{7, 0, 1612}, - dictWord{11, 0, 186}, - dictWord{12, 0, 133}, - dictWord{6, 10, 127}, - dictWord{7, 10, 1511}, - dictWord{8, 10, 613}, - dictWord{ - 12, - 10, - 495, - }, - dictWord{12, 10, 586}, - dictWord{12, 10, 660}, - dictWord{12, 10, 668}, - dictWord{14, 10, 385}, - dictWord{15, 10, 118}, - dictWord{17, 10, 20}, - dictWord{ - 146, - 10, - 98, - }, - dictWord{6, 0, 1785}, - dictWord{133, 11, 816}, - dictWord{134, 0, 1339}, - dictWord{7, 0, 961}, - dictWord{7, 0, 1085}, - dictWord{7, 0, 1727}, - dictWord{ - 8, - 0, - 462, - }, - dictWord{6, 10, 230}, - dictWord{135, 11, 1727}, - dictWord{9, 0, 636}, - dictWord{135, 10, 1954}, - dictWord{132, 0, 780}, - dictWord{5, 11, 869}, - dictWord{5, 11, 968}, - dictWord{6, 11, 1626}, - dictWord{8, 11, 734}, - dictWord{136, 11, 784}, - dictWord{4, 11, 
542}, - dictWord{6, 11, 1716}, - dictWord{6, 11, 1727}, - dictWord{7, 11, 1082}, - dictWord{7, 11, 1545}, - dictWord{8, 11, 56}, - dictWord{8, 11, 118}, - dictWord{8, 11, 412}, - dictWord{8, 11, 564}, - dictWord{9, 11, 888}, - dictWord{9, 11, 908}, - dictWord{10, 11, 50}, - dictWord{10, 11, 423}, - dictWord{11, 11, 685}, - dictWord{11, 11, 697}, - dictWord{11, 11, 933}, - dictWord{12, 11, 299}, - dictWord{13, 11, 126}, - dictWord{13, 11, 136}, - dictWord{13, 11, 170}, - dictWord{141, 11, 190}, - dictWord{134, 11, 226}, - dictWord{4, 11, 232}, - dictWord{ - 9, - 11, - 202, - }, - dictWord{10, 11, 474}, - dictWord{140, 11, 433}, - dictWord{137, 11, 500}, - dictWord{5, 0, 529}, - dictWord{136, 10, 68}, - dictWord{132, 10, 654}, - dictWord{ - 4, - 10, - 156, - }, - dictWord{7, 10, 998}, - dictWord{7, 10, 1045}, - dictWord{7, 10, 1860}, - dictWord{9, 10, 48}, - dictWord{9, 10, 692}, - dictWord{11, 10, 419}, - dictWord{139, 10, 602}, - dictWord{7, 0, 1276}, - dictWord{8, 0, 474}, - dictWord{9, 0, 652}, - dictWord{6, 11, 108}, - dictWord{7, 11, 1003}, - dictWord{7, 11, 1181}, - dictWord{136, 11, 343}, - dictWord{7, 11, 1264}, - dictWord{7, 11, 1678}, - dictWord{11, 11, 945}, - dictWord{12, 11, 341}, - dictWord{12, 11, 471}, - dictWord{ - 140, - 11, - 569, - }, - dictWord{134, 11, 1712}, - dictWord{5, 0, 948}, - dictWord{12, 0, 468}, - dictWord{19, 0, 96}, - dictWord{148, 0, 24}, - dictWord{4, 11, 133}, - dictWord{ - 7, - 11, - 711, - }, - dictWord{7, 11, 1298}, - dictWord{7, 11, 1585}, - dictWord{135, 11, 1929}, - dictWord{6, 0, 753}, - dictWord{140, 0, 657}, - dictWord{139, 0, 941}, - dictWord{ - 6, - 11, - 99, - }, - dictWord{7, 11, 1808}, - dictWord{145, 11, 57}, - dictWord{6, 11, 574}, - dictWord{7, 11, 428}, - dictWord{7, 11, 1250}, - dictWord{10, 11, 669}, - dictWord{ - 11, - 11, - 485, - }, - dictWord{11, 11, 840}, - dictWord{12, 11, 300}, - dictWord{142, 11, 250}, - dictWord{4, 0, 532}, - dictWord{5, 0, 706}, - dictWord{135, 0, 662}, - dictWord{ - 5, - 0, - 837, - }, - dictWord{6, 0, 1651}, - dictWord{139, 0, 985}, - dictWord{7, 0, 1861}, - dictWord{9, 10, 197}, - dictWord{10, 10, 300}, - dictWord{12, 10, 473}, - dictWord{ - 13, - 10, - 90, - }, - dictWord{141, 10, 405}, - dictWord{137, 11, 252}, - dictWord{6, 11, 323}, - dictWord{135, 11, 1564}, - dictWord{4, 0, 330}, - dictWord{4, 0, 863}, - dictWord{7, 0, 933}, - dictWord{7, 0, 2012}, - dictWord{8, 0, 292}, - dictWord{7, 11, 461}, - dictWord{8, 11, 775}, - dictWord{138, 11, 435}, - dictWord{132, 10, 606}, - dictWord{ - 4, - 11, - 655, - }, - dictWord{7, 11, 850}, - dictWord{17, 11, 75}, - dictWord{146, 11, 137}, - dictWord{135, 0, 767}, - dictWord{7, 10, 1978}, - dictWord{136, 10, 676}, - dictWord{132, 0, 641}, - dictWord{135, 11, 1559}, - dictWord{134, 0, 1233}, - dictWord{137, 0, 242}, - dictWord{17, 0, 114}, - dictWord{4, 10, 361}, - dictWord{ - 133, - 10, - 315, - }, - dictWord{137, 0, 883}, - dictWord{132, 10, 461}, - dictWord{138, 0, 274}, - dictWord{134, 0, 2008}, - dictWord{134, 0, 1794}, - dictWord{4, 0, 703}, - dictWord{135, 0, 207}, - dictWord{12, 0, 285}, - dictWord{132, 10, 472}, - dictWord{132, 0, 571}, - dictWord{5, 0, 873}, - dictWord{5, 0, 960}, - dictWord{8, 0, 823}, - dictWord{9, 0, 881}, - dictWord{136, 11, 577}, - dictWord{7, 0, 617}, - dictWord{10, 0, 498}, - dictWord{11, 0, 501}, - dictWord{12, 0, 16}, - dictWord{140, 0, 150}, - dictWord{ - 138, - 10, - 747, - }, - dictWord{132, 0, 431}, - dictWord{133, 10, 155}, - dictWord{11, 0, 283}, - dictWord{11, 0, 567}, - dictWord{7, 10, 163}, - dictWord{8, 10, 319}, - 
dictWord{ - 9, - 10, - 402, - }, - dictWord{10, 10, 24}, - dictWord{10, 10, 681}, - dictWord{11, 10, 200}, - dictWord{12, 10, 253}, - dictWord{12, 10, 410}, - dictWord{142, 10, 219}, - dictWord{4, 11, 413}, - dictWord{5, 11, 677}, - dictWord{8, 11, 432}, - dictWord{140, 11, 280}, - dictWord{9, 0, 401}, - dictWord{5, 10, 475}, - dictWord{7, 10, 1780}, - dictWord{11, 10, 297}, - dictWord{11, 10, 558}, - dictWord{14, 10, 322}, - dictWord{147, 10, 76}, - dictWord{6, 0, 781}, - dictWord{9, 0, 134}, - dictWord{10, 0, 2}, - dictWord{ - 10, - 0, - 27, - }, - dictWord{10, 0, 333}, - dictWord{11, 0, 722}, - dictWord{143, 0, 1}, - dictWord{5, 0, 33}, - dictWord{6, 0, 470}, - dictWord{139, 0, 424}, - dictWord{ - 135, - 0, - 2006, - }, - dictWord{12, 0, 783}, - dictWord{135, 10, 1956}, - dictWord{136, 0, 274}, - dictWord{135, 0, 1882}, - dictWord{132, 0, 794}, - dictWord{135, 0, 1848}, - dictWord{5, 10, 944}, - dictWord{134, 10, 1769}, - dictWord{6, 0, 47}, - dictWord{7, 0, 90}, - dictWord{7, 0, 664}, - dictWord{7, 0, 830}, - dictWord{7, 0, 1380}, - dictWord{ - 7, - 0, - 2025, - }, - dictWord{8, 0, 448}, - dictWord{136, 0, 828}, - dictWord{132, 10, 144}, - dictWord{134, 0, 1199}, - dictWord{4, 11, 395}, - dictWord{139, 11, 762}, - dictWord{135, 11, 1504}, - dictWord{9, 0, 417}, - dictWord{137, 0, 493}, - dictWord{9, 11, 174}, - dictWord{10, 11, 164}, - dictWord{11, 11, 440}, - dictWord{11, 11, 841}, - dictWord{143, 11, 98}, - dictWord{134, 11, 426}, - dictWord{139, 11, 1002}, - dictWord{134, 0, 295}, - dictWord{134, 0, 816}, - dictWord{6, 10, 247}, - dictWord{ - 137, - 10, - 555, - }, - dictWord{133, 0, 1019}, - dictWord{4, 0, 620}, - dictWord{5, 11, 476}, - dictWord{10, 10, 280}, - dictWord{138, 10, 797}, - dictWord{139, 0, 464}, - dictWord{5, 11, 76}, - dictWord{6, 11, 458}, - dictWord{6, 11, 497}, - dictWord{7, 11, 764}, - dictWord{7, 11, 868}, - dictWord{9, 11, 658}, - dictWord{10, 11, 594}, - dictWord{ - 11, - 11, - 173, - }, - dictWord{11, 11, 566}, - dictWord{12, 11, 20}, - dictWord{12, 11, 338}, - dictWord{141, 11, 200}, - dictWord{134, 0, 208}, - dictWord{4, 11, 526}, - dictWord{7, 11, 1029}, - dictWord{135, 11, 1054}, - dictWord{132, 11, 636}, - dictWord{6, 11, 233}, - dictWord{7, 11, 660}, - dictWord{7, 11, 1124}, - dictWord{ - 17, - 11, - 31, - }, - dictWord{19, 11, 22}, - dictWord{151, 11, 14}, - dictWord{10, 0, 442}, - dictWord{133, 10, 428}, - dictWord{10, 0, 930}, - dictWord{140, 0, 778}, - dictWord{ - 6, - 0, - 68, - }, - dictWord{7, 0, 448}, - dictWord{7, 0, 1629}, - dictWord{7, 0, 1769}, - dictWord{7, 0, 1813}, - dictWord{8, 0, 442}, - dictWord{8, 0, 516}, - dictWord{9, 0, 710}, - dictWord{ - 10, - 0, - 282, - }, - dictWord{10, 0, 722}, - dictWord{7, 10, 1717}, - dictWord{138, 10, 546}, - dictWord{134, 0, 1128}, - dictWord{11, 0, 844}, - dictWord{12, 0, 104}, - dictWord{140, 0, 625}, - dictWord{4, 11, 432}, - dictWord{135, 11, 824}, - dictWord{138, 10, 189}, - dictWord{133, 0, 787}, - dictWord{133, 10, 99}, - dictWord{ - 4, - 11, - 279, - }, - dictWord{7, 11, 301}, - dictWord{137, 11, 362}, - dictWord{8, 0, 491}, - dictWord{4, 10, 397}, - dictWord{136, 10, 555}, - dictWord{4, 11, 178}, - dictWord{ - 133, - 11, - 399, - }, - dictWord{134, 0, 711}, - dictWord{144, 0, 9}, - dictWord{4, 0, 403}, - dictWord{5, 0, 441}, - dictWord{7, 0, 450}, - dictWord{10, 0, 840}, - dictWord{11, 0, 101}, - dictWord{12, 0, 193}, - dictWord{141, 0, 430}, - dictWord{135, 11, 1246}, - dictWord{12, 10, 398}, - dictWord{20, 10, 39}, - dictWord{21, 10, 11}, - dictWord{ - 150, - 10, - 41, - }, - dictWord{4, 10, 
485}, - dictWord{7, 10, 353}, - dictWord{135, 10, 1523}, - dictWord{6, 10, 366}, - dictWord{7, 10, 1384}, - dictWord{7, 10, 1601}, - dictWord{ - 135, - 11, - 1912, - }, - dictWord{7, 0, 396}, - dictWord{10, 0, 160}, - dictWord{135, 11, 396}, - dictWord{137, 10, 282}, - dictWord{134, 11, 1692}, - dictWord{4, 10, 157}, - dictWord{5, 10, 471}, - dictWord{6, 11, 202}, - dictWord{10, 11, 448}, - dictWord{11, 11, 208}, - dictWord{12, 11, 360}, - dictWord{17, 11, 117}, - dictWord{ - 17, - 11, - 118, - }, - dictWord{18, 11, 27}, - dictWord{148, 11, 67}, - dictWord{133, 0, 679}, - dictWord{137, 0, 326}, - dictWord{136, 10, 116}, - dictWord{7, 11, 872}, - dictWord{ - 10, - 11, - 516, - }, - dictWord{139, 11, 167}, - dictWord{132, 11, 224}, - dictWord{5, 11, 546}, - dictWord{7, 11, 35}, - dictWord{8, 11, 11}, - dictWord{8, 11, 12}, - dictWord{ - 9, - 11, - 315, - }, - dictWord{9, 11, 533}, - dictWord{10, 11, 802}, - dictWord{11, 11, 166}, - dictWord{12, 11, 525}, - dictWord{142, 11, 243}, - dictWord{7, 0, 1128}, - dictWord{135, 11, 1920}, - dictWord{5, 11, 241}, - dictWord{8, 11, 242}, - dictWord{9, 11, 451}, - dictWord{10, 11, 667}, - dictWord{11, 11, 598}, - dictWord{ - 140, - 11, - 429, - }, - dictWord{6, 0, 737}, - dictWord{5, 10, 160}, - dictWord{7, 10, 363}, - dictWord{7, 10, 589}, - dictWord{10, 10, 170}, - dictWord{141, 10, 55}, - dictWord{ - 135, - 0, - 1796, - }, - dictWord{142, 11, 254}, - dictWord{4, 0, 574}, - dictWord{7, 0, 350}, - dictWord{7, 0, 1024}, - dictWord{8, 0, 338}, - dictWord{9, 0, 677}, - dictWord{138, 0, 808}, - dictWord{134, 0, 1096}, - dictWord{137, 11, 516}, - dictWord{7, 0, 405}, - dictWord{10, 0, 491}, - dictWord{4, 10, 108}, - dictWord{4, 11, 366}, - dictWord{ - 139, - 10, - 498, - }, - dictWord{11, 11, 337}, - dictWord{142, 11, 303}, - dictWord{134, 11, 1736}, - dictWord{7, 0, 1081}, - dictWord{140, 11, 364}, - dictWord{7, 10, 1005}, - dictWord{140, 10, 609}, - dictWord{7, 0, 1676}, - dictWord{4, 10, 895}, - dictWord{133, 10, 772}, - dictWord{135, 0, 2037}, - dictWord{6, 0, 1207}, - dictWord{ - 11, - 11, - 916, - }, - dictWord{142, 11, 419}, - dictWord{14, 11, 140}, - dictWord{148, 11, 41}, - dictWord{6, 11, 331}, - dictWord{136, 11, 623}, - dictWord{9, 0, 944}, - dictWord{ - 9, - 0, - 969, - }, - dictWord{9, 0, 1022}, - dictWord{12, 0, 913}, - dictWord{12, 0, 936}, - dictWord{15, 0, 177}, - dictWord{15, 0, 193}, - dictWord{4, 10, 926}, - dictWord{ - 133, - 10, - 983, - }, - dictWord{5, 0, 354}, - dictWord{135, 11, 506}, - dictWord{8, 0, 598}, - dictWord{9, 0, 664}, - dictWord{138, 0, 441}, - dictWord{4, 11, 640}, - dictWord{ - 133, - 11, - 513, - }, - dictWord{137, 0, 297}, - dictWord{132, 10, 538}, - dictWord{6, 10, 294}, - dictWord{7, 10, 1267}, - dictWord{136, 10, 624}, - dictWord{7, 0, 1772}, - dictWord{ - 7, - 11, - 1888, - }, - dictWord{8, 11, 289}, - dictWord{11, 11, 45}, - dictWord{12, 11, 278}, - dictWord{140, 11, 537}, - dictWord{135, 10, 1325}, - dictWord{138, 0, 751}, - dictWord{141, 0, 37}, - dictWord{134, 0, 1828}, - dictWord{132, 10, 757}, - dictWord{132, 11, 394}, - dictWord{6, 0, 257}, - dictWord{135, 0, 1522}, - dictWord{ - 4, - 0, - 582, - }, - dictWord{9, 0, 191}, - dictWord{135, 11, 1931}, - dictWord{7, 11, 574}, - dictWord{7, 11, 1719}, - dictWord{137, 11, 145}, - dictWord{132, 11, 658}, - dictWord{10, 0, 790}, - dictWord{132, 11, 369}, - dictWord{9, 11, 781}, - dictWord{10, 11, 144}, - dictWord{11, 11, 385}, - dictWord{13, 11, 161}, - dictWord{13, 11, 228}, - dictWord{13, 11, 268}, - dictWord{148, 11, 107}, - dictWord{8, 0, 469}, - 
dictWord{10, 0, 47}, - dictWord{136, 11, 374}, - dictWord{6, 0, 306}, - dictWord{7, 0, 1140}, - dictWord{7, 0, 1340}, - dictWord{8, 0, 133}, - dictWord{138, 0, 449}, - dictWord{139, 0, 1011}, - dictWord{7, 10, 1875}, - dictWord{139, 10, 124}, - dictWord{ - 4, - 11, - 344, - }, - dictWord{6, 11, 498}, - dictWord{139, 11, 323}, - dictWord{137, 0, 299}, - dictWord{132, 0, 837}, - dictWord{133, 11, 906}, - dictWord{5, 0, 329}, - dictWord{ - 8, - 0, - 260, - }, - dictWord{138, 0, 10}, - dictWord{134, 0, 1320}, - dictWord{4, 0, 657}, - dictWord{146, 0, 158}, - dictWord{135, 0, 1191}, - dictWord{152, 0, 7}, - dictWord{ - 6, - 0, - 1939, - }, - dictWord{8, 0, 974}, - dictWord{138, 0, 996}, - dictWord{135, 0, 1665}, - dictWord{11, 11, 126}, - dictWord{139, 11, 287}, - dictWord{143, 0, 8}, - dictWord{ - 14, - 11, - 149, - }, - dictWord{14, 11, 399}, - dictWord{143, 11, 57}, - dictWord{5, 0, 66}, - dictWord{7, 0, 1896}, - dictWord{136, 0, 288}, - dictWord{7, 0, 175}, - dictWord{ - 10, - 0, - 494, - }, - dictWord{5, 10, 150}, - dictWord{8, 10, 603}, - dictWord{9, 10, 593}, - dictWord{9, 10, 634}, - dictWord{10, 10, 173}, - dictWord{11, 10, 462}, - dictWord{ - 11, - 10, - 515, - }, - dictWord{13, 10, 216}, - dictWord{13, 10, 288}, - dictWord{142, 10, 400}, - dictWord{134, 0, 1643}, - dictWord{136, 11, 21}, - dictWord{4, 0, 21}, - dictWord{ - 5, - 0, - 91, - }, - dictWord{5, 0, 648}, - dictWord{5, 0, 750}, - dictWord{5, 0, 781}, - dictWord{6, 0, 54}, - dictWord{6, 0, 112}, - dictWord{6, 0, 402}, - dictWord{6, 0, 1732}, - dictWord{ - 7, - 0, - 315, - }, - dictWord{7, 0, 749}, - dictWord{7, 0, 1427}, - dictWord{7, 0, 1900}, - dictWord{9, 0, 78}, - dictWord{9, 0, 508}, - dictWord{10, 0, 611}, - dictWord{10, 0, 811}, - dictWord{11, 0, 510}, - dictWord{11, 0, 728}, - dictWord{13, 0, 36}, - dictWord{14, 0, 39}, - dictWord{16, 0, 83}, - dictWord{17, 0, 124}, - dictWord{148, 0, 30}, - dictWord{ - 4, - 0, - 668, - }, - dictWord{136, 0, 570}, - dictWord{10, 0, 322}, - dictWord{10, 0, 719}, - dictWord{139, 0, 407}, - dictWord{135, 11, 1381}, - dictWord{136, 11, 193}, - dictWord{12, 10, 108}, - dictWord{141, 10, 291}, - dictWord{132, 11, 616}, - dictWord{136, 11, 692}, - dictWord{8, 0, 125}, - dictWord{8, 0, 369}, - dictWord{8, 0, 524}, - dictWord{10, 0, 486}, - dictWord{11, 0, 13}, - dictWord{11, 0, 381}, - dictWord{11, 0, 736}, - dictWord{11, 0, 766}, - dictWord{11, 0, 845}, - dictWord{13, 0, 114}, - dictWord{ - 13, - 0, - 292, - }, - dictWord{142, 0, 47}, - dictWord{134, 0, 1247}, - dictWord{6, 0, 1684}, - dictWord{6, 0, 1731}, - dictWord{7, 0, 356}, - dictWord{8, 0, 54}, - dictWord{8, 0, 221}, - dictWord{9, 0, 225}, - dictWord{9, 0, 356}, - dictWord{10, 0, 77}, - dictWord{10, 0, 446}, - dictWord{10, 0, 731}, - dictWord{12, 0, 404}, - dictWord{141, 0, 491}, - dictWord{135, 10, 1777}, - dictWord{4, 11, 305}, - dictWord{4, 10, 493}, - dictWord{144, 10, 55}, - dictWord{4, 0, 951}, - dictWord{6, 0, 1809}, - dictWord{6, 0, 1849}, - dictWord{8, 0, 846}, - dictWord{8, 0, 866}, - dictWord{8, 0, 899}, - dictWord{10, 0, 896}, - dictWord{12, 0, 694}, - dictWord{142, 0, 468}, - dictWord{5, 11, 214}, - dictWord{ - 7, - 11, - 603, - }, - dictWord{8, 11, 611}, - dictWord{9, 11, 686}, - dictWord{10, 11, 88}, - dictWord{11, 11, 459}, - dictWord{11, 11, 496}, - dictWord{12, 11, 463}, - dictWord{ - 12, - 11, - 590, - }, - dictWord{13, 11, 0}, - dictWord{142, 11, 214}, - dictWord{132, 0, 411}, - dictWord{4, 0, 80}, - dictWord{133, 0, 44}, - dictWord{140, 11, 74}, - dictWord{ - 143, - 0, - 31, - }, - dictWord{7, 0, 669}, - 
dictWord{6, 10, 568}, - dictWord{7, 10, 1804}, - dictWord{8, 10, 362}, - dictWord{8, 10, 410}, - dictWord{8, 10, 830}, - dictWord{9, 10, 514}, - dictWord{11, 10, 649}, - dictWord{142, 10, 157}, - dictWord{7, 0, 673}, - dictWord{134, 11, 1703}, - dictWord{132, 10, 625}, - dictWord{134, 0, 1303}, - dictWord{ - 5, - 0, - 299, - }, - dictWord{135, 0, 1083}, - dictWord{138, 0, 704}, - dictWord{6, 0, 275}, - dictWord{7, 0, 408}, - dictWord{6, 10, 158}, - dictWord{7, 10, 129}, - dictWord{ - 7, - 10, - 181, - }, - dictWord{8, 10, 276}, - dictWord{8, 10, 377}, - dictWord{10, 10, 523}, - dictWord{11, 10, 816}, - dictWord{12, 10, 455}, - dictWord{13, 10, 303}, - dictWord{ - 142, - 10, - 135, - }, - dictWord{4, 0, 219}, - dictWord{7, 0, 367}, - dictWord{7, 0, 1713}, - dictWord{7, 0, 1761}, - dictWord{9, 0, 86}, - dictWord{9, 0, 537}, - dictWord{10, 0, 165}, - dictWord{12, 0, 219}, - dictWord{140, 0, 561}, - dictWord{8, 0, 216}, - dictWord{4, 10, 1}, - dictWord{4, 11, 737}, - dictWord{6, 11, 317}, - dictWord{7, 10, 1143}, - dictWord{ - 7, - 10, - 1463, - }, - dictWord{9, 10, 207}, - dictWord{9, 10, 390}, - dictWord{9, 10, 467}, - dictWord{10, 11, 98}, - dictWord{11, 11, 294}, - dictWord{11, 10, 836}, - dictWord{ - 12, - 11, - 60, - }, - dictWord{12, 11, 437}, - dictWord{13, 11, 64}, - dictWord{13, 11, 380}, - dictWord{142, 11, 430}, - dictWord{6, 11, 1758}, - dictWord{8, 11, 520}, - dictWord{9, 11, 345}, - dictWord{9, 11, 403}, - dictWord{142, 11, 350}, - dictWord{5, 11, 47}, - dictWord{10, 11, 242}, - dictWord{138, 11, 579}, - dictWord{5, 11, 139}, - dictWord{7, 11, 1168}, - dictWord{138, 11, 539}, - dictWord{135, 0, 1319}, - dictWord{4, 10, 295}, - dictWord{4, 10, 723}, - dictWord{5, 10, 895}, - dictWord{ - 7, - 10, - 1031, - }, - dictWord{8, 10, 199}, - dictWord{8, 10, 340}, - dictWord{9, 10, 153}, - dictWord{9, 10, 215}, - dictWord{10, 10, 21}, - dictWord{10, 10, 59}, - dictWord{ - 10, - 10, - 80, - }, - dictWord{10, 10, 224}, - dictWord{10, 10, 838}, - dictWord{11, 10, 229}, - dictWord{11, 10, 652}, - dictWord{12, 10, 192}, - dictWord{13, 10, 146}, - dictWord{ - 142, - 10, - 91, - }, - dictWord{140, 0, 428}, - dictWord{137, 10, 51}, - dictWord{133, 0, 514}, - dictWord{5, 10, 309}, - dictWord{140, 10, 211}, - dictWord{6, 0, 1010}, - dictWord{5, 10, 125}, - dictWord{8, 10, 77}, - dictWord{138, 10, 15}, - dictWord{4, 0, 55}, - dictWord{5, 0, 301}, - dictWord{6, 0, 571}, - dictWord{142, 0, 49}, - dictWord{ - 146, - 0, - 102, - }, - dictWord{136, 11, 370}, - dictWord{4, 11, 107}, - dictWord{7, 11, 613}, - dictWord{8, 11, 358}, - dictWord{8, 11, 439}, - dictWord{8, 11, 504}, - dictWord{ - 9, - 11, - 501, - }, - dictWord{10, 11, 383}, - dictWord{139, 11, 477}, - dictWord{132, 11, 229}, - dictWord{133, 0, 364}, - dictWord{133, 10, 439}, - dictWord{4, 11, 903}, - dictWord{135, 11, 1816}, - dictWord{11, 0, 379}, - dictWord{140, 10, 76}, - dictWord{4, 0, 76}, - dictWord{4, 0, 971}, - dictWord{7, 0, 1550}, - dictWord{9, 0, 306}, - dictWord{ - 9, - 0, - 430, - }, - dictWord{9, 0, 663}, - dictWord{10, 0, 683}, - dictWord{10, 0, 921}, - dictWord{11, 0, 427}, - dictWord{11, 0, 753}, - dictWord{12, 0, 334}, - dictWord{12, 0, 442}, - dictWord{14, 0, 258}, - dictWord{14, 0, 366}, - dictWord{143, 0, 131}, - dictWord{137, 0, 52}, - dictWord{4, 11, 47}, - dictWord{6, 11, 373}, - dictWord{7, 11, 452}, - dictWord{7, 11, 543}, - dictWord{7, 11, 1714}, - dictWord{7, 11, 1856}, - dictWord{9, 11, 6}, - dictWord{11, 11, 257}, - dictWord{139, 11, 391}, - dictWord{4, 10, 8}, - dictWord{ - 7, - 10, - 1152, - }, - dictWord{7, 10, 
1153}, - dictWord{7, 10, 1715}, - dictWord{9, 10, 374}, - dictWord{10, 10, 478}, - dictWord{139, 10, 648}, - dictWord{4, 11, 785}, - dictWord{133, 11, 368}, - dictWord{135, 10, 1099}, - dictWord{135, 11, 860}, - dictWord{5, 11, 980}, - dictWord{134, 11, 1754}, - dictWord{134, 0, 1258}, - dictWord{ - 6, - 0, - 1058, - }, - dictWord{6, 0, 1359}, - dictWord{7, 11, 536}, - dictWord{7, 11, 1331}, - dictWord{136, 11, 143}, - dictWord{4, 0, 656}, - dictWord{135, 0, 779}, - dictWord{136, 10, 87}, - dictWord{5, 11, 19}, - dictWord{6, 11, 533}, - dictWord{146, 11, 126}, - dictWord{7, 0, 144}, - dictWord{138, 10, 438}, - dictWord{5, 11, 395}, - dictWord{5, 11, 951}, - dictWord{134, 11, 1776}, - dictWord{135, 0, 1373}, - dictWord{7, 0, 554}, - dictWord{7, 0, 605}, - dictWord{141, 0, 10}, - dictWord{4, 10, 69}, - dictWord{ - 5, - 10, - 122, - }, - dictWord{9, 10, 656}, - dictWord{138, 10, 464}, - dictWord{5, 10, 849}, - dictWord{134, 10, 1633}, - dictWord{5, 0, 838}, - dictWord{5, 0, 841}, - dictWord{134, 0, 1649}, - dictWord{133, 0, 1012}, - dictWord{139, 10, 499}, - dictWord{7, 10, 476}, - dictWord{7, 10, 1592}, - dictWord{138, 10, 87}, - dictWord{ - 6, - 0, - 251, - }, - dictWord{7, 0, 365}, - dictWord{7, 0, 1357}, - dictWord{7, 0, 1497}, - dictWord{8, 0, 154}, - dictWord{141, 0, 281}, - dictWord{132, 11, 441}, - dictWord{ - 132, - 11, - 695, - }, - dictWord{7, 11, 497}, - dictWord{9, 11, 387}, - dictWord{147, 11, 81}, - dictWord{133, 0, 340}, - dictWord{14, 10, 283}, - dictWord{142, 11, 283}, - dictWord{ - 134, - 0, - 810, - }, - dictWord{135, 11, 1894}, - dictWord{139, 0, 495}, - dictWord{5, 11, 284}, - dictWord{6, 11, 49}, - dictWord{6, 11, 350}, - dictWord{7, 11, 1}, - dictWord{ - 7, - 11, - 377, - }, - dictWord{7, 11, 1693}, - dictWord{8, 11, 18}, - dictWord{8, 11, 678}, - dictWord{9, 11, 161}, - dictWord{9, 11, 585}, - dictWord{9, 11, 671}, - dictWord{ - 9, - 11, - 839, - }, - dictWord{11, 11, 912}, - dictWord{141, 11, 427}, - dictWord{5, 10, 859}, - dictWord{7, 10, 1160}, - dictWord{8, 10, 107}, - dictWord{9, 10, 291}, - dictWord{ - 9, - 10, - 439, - }, - dictWord{10, 10, 663}, - dictWord{11, 10, 609}, - dictWord{140, 10, 197}, - dictWord{8, 0, 261}, - dictWord{9, 0, 144}, - dictWord{9, 0, 466}, - dictWord{ - 10, - 0, - 370, - }, - dictWord{12, 0, 470}, - dictWord{13, 0, 144}, - dictWord{142, 0, 348}, - dictWord{137, 0, 897}, - dictWord{6, 0, 248}, - dictWord{9, 0, 546}, - dictWord{10, 0, 535}, - dictWord{11, 0, 681}, - dictWord{141, 0, 135}, - dictWord{4, 0, 358}, - dictWord{135, 0, 1496}, - dictWord{134, 0, 567}, - dictWord{136, 0, 445}, - dictWord{ - 4, - 10, - 117, - }, - dictWord{6, 10, 372}, - dictWord{7, 10, 1905}, - dictWord{142, 10, 323}, - dictWord{4, 10, 722}, - dictWord{139, 10, 471}, - dictWord{6, 0, 697}, - dictWord{ - 134, - 0, - 996, - }, - dictWord{7, 11, 2007}, - dictWord{9, 11, 101}, - dictWord{9, 11, 450}, - dictWord{10, 11, 66}, - dictWord{10, 11, 842}, - dictWord{11, 11, 536}, - dictWord{ - 140, - 11, - 587, - }, - dictWord{132, 0, 577}, - dictWord{134, 0, 1336}, - dictWord{9, 10, 5}, - dictWord{12, 10, 216}, - dictWord{12, 10, 294}, - dictWord{12, 10, 298}, - dictWord{12, 10, 400}, - dictWord{12, 10, 518}, - dictWord{13, 10, 229}, - dictWord{143, 10, 139}, - dictWord{6, 0, 174}, - dictWord{138, 0, 917}, - dictWord{ - 134, - 10, - 1774, - }, - dictWord{5, 10, 12}, - dictWord{7, 10, 375}, - dictWord{9, 10, 88}, - dictWord{9, 10, 438}, - dictWord{11, 11, 62}, - dictWord{139, 10, 270}, - dictWord{ - 134, - 11, - 1766, - }, - dictWord{6, 11, 0}, - dictWord{7, 11, 84}, - 
dictWord{7, 10, 816}, - dictWord{7, 10, 1241}, - dictWord{9, 10, 283}, - dictWord{9, 10, 520}, - dictWord{10, 10, 213}, - dictWord{10, 10, 307}, - dictWord{10, 10, 463}, - dictWord{10, 10, 671}, - dictWord{10, 10, 746}, - dictWord{11, 10, 401}, - dictWord{11, 10, 794}, - dictWord{ - 11, - 11, - 895, - }, - dictWord{12, 10, 517}, - dictWord{17, 11, 11}, - dictWord{18, 10, 107}, - dictWord{147, 10, 115}, - dictWord{5, 0, 878}, - dictWord{133, 0, 972}, - dictWord{ - 6, - 11, - 1665, - }, - dictWord{7, 11, 256}, - dictWord{7, 11, 1388}, - dictWord{138, 11, 499}, - dictWord{4, 10, 258}, - dictWord{136, 10, 639}, - dictWord{4, 11, 22}, - dictWord{5, 11, 10}, - dictWord{6, 10, 22}, - dictWord{7, 11, 848}, - dictWord{7, 10, 903}, - dictWord{7, 10, 1963}, - dictWord{8, 11, 97}, - dictWord{138, 10, 577}, - dictWord{ - 5, - 10, - 681, - }, - dictWord{136, 10, 782}, - dictWord{133, 11, 481}, - dictWord{132, 0, 351}, - dictWord{4, 10, 664}, - dictWord{5, 10, 804}, - dictWord{139, 10, 1013}, - dictWord{6, 11, 134}, - dictWord{7, 11, 437}, - dictWord{7, 11, 959}, - dictWord{9, 11, 37}, - dictWord{14, 11, 285}, - dictWord{14, 11, 371}, - dictWord{144, 11, 60}, - dictWord{7, 11, 486}, - dictWord{8, 11, 155}, - dictWord{11, 11, 93}, - dictWord{140, 11, 164}, - dictWord{132, 0, 286}, - dictWord{7, 0, 438}, - dictWord{7, 0, 627}, - dictWord{7, 0, 1516}, - dictWord{8, 0, 40}, - dictWord{9, 0, 56}, - dictWord{9, 0, 294}, - dictWord{10, 0, 30}, - dictWord{11, 0, 969}, - dictWord{11, 0, 995}, - dictWord{146, 0, 148}, - dictWord{5, 11, 591}, - dictWord{135, 11, 337}, - dictWord{134, 0, 1950}, - dictWord{133, 10, 32}, - dictWord{138, 11, 500}, - dictWord{5, 11, 380}, - dictWord{ - 5, - 11, - 650, - }, - dictWord{136, 11, 310}, - dictWord{4, 11, 364}, - dictWord{7, 11, 1156}, - dictWord{7, 11, 1187}, - dictWord{137, 11, 409}, - dictWord{4, 0, 738}, - dictWord{134, 11, 482}, - dictWord{4, 11, 781}, - dictWord{6, 11, 487}, - dictWord{7, 11, 926}, - dictWord{8, 11, 263}, - dictWord{139, 11, 500}, - dictWord{135, 11, 418}, - dictWord{6, 0, 2047}, - dictWord{10, 0, 969}, - dictWord{4, 10, 289}, - dictWord{7, 10, 629}, - dictWord{7, 10, 1698}, - dictWord{7, 10, 1711}, - dictWord{ - 140, - 10, - 215, - }, - dictWord{6, 10, 450}, - dictWord{136, 10, 109}, - dictWord{134, 0, 818}, - dictWord{136, 10, 705}, - dictWord{133, 0, 866}, - dictWord{4, 11, 94}, - dictWord{ - 135, - 11, - 1265, - }, - dictWord{132, 11, 417}, - dictWord{134, 0, 1467}, - dictWord{135, 10, 1238}, - dictWord{4, 0, 972}, - dictWord{6, 0, 1851}, - dictWord{ - 134, - 0, - 1857, - }, - dictWord{134, 0, 355}, - dictWord{133, 0, 116}, - dictWord{132, 0, 457}, - dictWord{135, 11, 1411}, - dictWord{4, 11, 408}, - dictWord{4, 11, 741}, - dictWord{135, 11, 500}, - dictWord{134, 10, 26}, - dictWord{142, 11, 137}, - dictWord{5, 0, 527}, - dictWord{6, 0, 189}, - dictWord{7, 0, 859}, - dictWord{136, 0, 267}, - dictWord{11, 0, 104}, - dictWord{11, 0, 554}, - dictWord{15, 0, 60}, - dictWord{143, 0, 125}, - dictWord{134, 0, 1613}, - dictWord{4, 10, 414}, - dictWord{5, 10, 467}, - dictWord{ - 9, - 10, - 654, - }, - dictWord{10, 10, 451}, - dictWord{12, 10, 59}, - dictWord{141, 10, 375}, - dictWord{135, 10, 17}, - dictWord{134, 0, 116}, - dictWord{135, 11, 541}, - dictWord{135, 10, 955}, - dictWord{6, 11, 73}, - dictWord{135, 11, 177}, - dictWord{133, 11, 576}, - dictWord{134, 0, 886}, - dictWord{133, 0, 487}, - dictWord{ - 4, - 0, - 86, - }, - dictWord{5, 0, 667}, - dictWord{5, 0, 753}, - dictWord{6, 0, 316}, - dictWord{6, 0, 455}, - dictWord{135, 0, 946}, - dictWord{142, 
11, 231}, - dictWord{150, 0, 45}, - dictWord{134, 0, 863}, - dictWord{134, 0, 1953}, - dictWord{6, 10, 280}, - dictWord{10, 10, 502}, - dictWord{11, 10, 344}, - dictWord{140, 10, 38}, - dictWord{4, 0, 79}, - dictWord{7, 0, 1773}, - dictWord{10, 0, 450}, - dictWord{11, 0, 589}, - dictWord{13, 0, 332}, - dictWord{13, 0, 493}, - dictWord{14, 0, 183}, - dictWord{14, 0, 334}, - dictWord{14, 0, 362}, - dictWord{14, 0, 368}, - dictWord{14, 0, 376}, - dictWord{14, 0, 379}, - dictWord{19, 0, 90}, - dictWord{19, 0, 103}, - dictWord{19, 0, 127}, - dictWord{ - 148, - 0, - 90, - }, - dictWord{5, 10, 45}, - dictWord{7, 10, 1161}, - dictWord{11, 10, 448}, - dictWord{11, 10, 880}, - dictWord{13, 10, 139}, - dictWord{13, 10, 407}, - dictWord{ - 15, - 10, - 16, - }, - dictWord{17, 10, 95}, - dictWord{18, 10, 66}, - dictWord{18, 10, 88}, - dictWord{18, 10, 123}, - dictWord{149, 10, 7}, - dictWord{136, 10, 777}, - dictWord{ - 4, - 10, - 410, - }, - dictWord{135, 10, 521}, - dictWord{135, 10, 1778}, - dictWord{135, 11, 538}, - dictWord{142, 0, 381}, - dictWord{133, 11, 413}, - dictWord{ - 134, - 0, - 1142, - }, - dictWord{6, 0, 1189}, - dictWord{136, 11, 495}, - dictWord{5, 0, 663}, - dictWord{6, 0, 1962}, - dictWord{134, 0, 2003}, - dictWord{7, 11, 54}, - dictWord{ - 8, - 11, - 312, - }, - dictWord{10, 11, 191}, - dictWord{10, 11, 614}, - dictWord{140, 11, 567}, - dictWord{132, 10, 436}, - dictWord{133, 0, 846}, - dictWord{10, 0, 528}, - dictWord{11, 0, 504}, - dictWord{7, 10, 1587}, - dictWord{135, 10, 1707}, - dictWord{5, 0, 378}, - dictWord{8, 0, 465}, - dictWord{9, 0, 286}, - dictWord{10, 0, 185}, - dictWord{ - 10, - 0, - 562, - }, - dictWord{10, 0, 635}, - dictWord{11, 0, 31}, - dictWord{11, 0, 393}, - dictWord{13, 0, 312}, - dictWord{18, 0, 65}, - dictWord{18, 0, 96}, - dictWord{147, 0, 89}, - dictWord{7, 0, 899}, - dictWord{14, 0, 325}, - dictWord{6, 11, 468}, - dictWord{7, 11, 567}, - dictWord{7, 11, 1478}, - dictWord{8, 11, 530}, - dictWord{142, 11, 290}, - dictWord{7, 0, 1880}, - dictWord{9, 0, 680}, - dictWord{139, 0, 798}, - dictWord{134, 0, 1770}, - dictWord{132, 0, 648}, - dictWord{150, 11, 35}, - dictWord{5, 0, 945}, - dictWord{6, 0, 1656}, - dictWord{6, 0, 1787}, - dictWord{7, 0, 167}, - dictWord{8, 0, 824}, - dictWord{9, 0, 391}, - dictWord{10, 0, 375}, - dictWord{139, 0, 185}, - dictWord{ - 6, - 11, - 484, - }, - dictWord{135, 11, 822}, - dictWord{134, 0, 2046}, - dictWord{7, 0, 1645}, - dictWord{8, 0, 352}, - dictWord{137, 0, 249}, - dictWord{132, 0, 152}, - dictWord{6, 0, 611}, - dictWord{135, 0, 1733}, - dictWord{6, 11, 1724}, - dictWord{135, 11, 2022}, - dictWord{133, 0, 1006}, - dictWord{141, 11, 96}, - dictWord{ - 5, - 0, - 420, - }, - dictWord{135, 0, 1449}, - dictWord{146, 11, 149}, - dictWord{135, 0, 832}, - dictWord{135, 10, 663}, - dictWord{133, 0, 351}, - dictWord{5, 0, 40}, - dictWord{ - 7, - 0, - 598, - }, - dictWord{7, 0, 1638}, - dictWord{8, 0, 78}, - dictWord{9, 0, 166}, - dictWord{9, 0, 640}, - dictWord{9, 0, 685}, - dictWord{9, 0, 773}, - dictWord{11, 0, 215}, - dictWord{13, 0, 65}, - dictWord{14, 0, 172}, - dictWord{14, 0, 317}, - dictWord{145, 0, 6}, - dictWord{8, 0, 60}, - dictWord{9, 0, 343}, - dictWord{139, 0, 769}, - dictWord{ - 134, - 0, - 1354, - }, - dictWord{132, 0, 724}, - dictWord{137, 0, 745}, - dictWord{132, 11, 474}, - dictWord{7, 0, 1951}, - dictWord{8, 0, 765}, - dictWord{8, 0, 772}, - dictWord{ - 140, - 0, - 671, - }, - dictWord{7, 0, 108}, - dictWord{8, 0, 219}, - dictWord{8, 0, 388}, - dictWord{9, 0, 775}, - dictWord{11, 0, 275}, - dictWord{140, 0, 
464}, - dictWord{137, 0, 639}, - dictWord{135, 10, 503}, - dictWord{133, 11, 366}, - dictWord{5, 0, 15}, - dictWord{6, 0, 56}, - dictWord{7, 0, 1758}, - dictWord{8, 0, 500}, - dictWord{9, 0, 730}, - dictWord{ - 11, - 0, - 331, - }, - dictWord{13, 0, 150}, - dictWord{14, 0, 282}, - dictWord{5, 11, 305}, - dictWord{9, 11, 560}, - dictWord{141, 11, 208}, - dictWord{4, 10, 113}, - dictWord{ - 5, - 10, - 163, - }, - dictWord{5, 10, 735}, - dictWord{7, 10, 1009}, - dictWord{9, 10, 9}, - dictWord{9, 10, 771}, - dictWord{12, 10, 90}, - dictWord{13, 10, 138}, - dictWord{ - 13, - 10, - 410, - }, - dictWord{143, 10, 128}, - dictWord{4, 10, 324}, - dictWord{138, 10, 104}, - dictWord{135, 11, 466}, - dictWord{142, 11, 27}, - dictWord{134, 0, 1886}, - dictWord{5, 0, 205}, - dictWord{6, 0, 438}, - dictWord{9, 0, 711}, - dictWord{4, 11, 480}, - dictWord{6, 11, 167}, - dictWord{6, 11, 302}, - dictWord{6, 11, 1642}, - dictWord{ - 7, - 11, - 130, - }, - dictWord{7, 11, 656}, - dictWord{7, 11, 837}, - dictWord{7, 11, 1547}, - dictWord{7, 11, 1657}, - dictWord{8, 11, 429}, - dictWord{9, 11, 228}, - dictWord{ - 10, - 11, - 643, - }, - dictWord{13, 11, 289}, - dictWord{13, 11, 343}, - dictWord{147, 11, 101}, - dictWord{134, 0, 865}, - dictWord{6, 0, 2025}, - dictWord{136, 0, 965}, - dictWord{ - 7, - 11, - 278, - }, - dictWord{10, 11, 739}, - dictWord{11, 11, 708}, - dictWord{141, 11, 348}, - dictWord{133, 0, 534}, - dictWord{135, 11, 1922}, - dictWord{ - 137, - 0, - 691, - }, - dictWord{4, 10, 935}, - dictWord{133, 10, 823}, - dictWord{6, 0, 443}, - dictWord{9, 0, 237}, - dictWord{9, 0, 571}, - dictWord{9, 0, 695}, - dictWord{10, 0, 139}, - dictWord{11, 0, 715}, - dictWord{12, 0, 417}, - dictWord{141, 0, 421}, - dictWord{5, 10, 269}, - dictWord{7, 10, 434}, - dictWord{7, 10, 891}, - dictWord{8, 10, 339}, - dictWord{ - 9, - 10, - 702, - }, - dictWord{11, 10, 594}, - dictWord{11, 10, 718}, - dictWord{145, 10, 100}, - dictWord{6, 0, 1555}, - dictWord{7, 0, 878}, - dictWord{9, 10, 485}, - dictWord{141, 10, 264}, - dictWord{134, 10, 1713}, - dictWord{7, 10, 1810}, - dictWord{11, 10, 866}, - dictWord{12, 10, 103}, - dictWord{141, 10, 495}, - dictWord{ - 135, - 10, - 900, - }, - dictWord{6, 0, 1410}, - dictWord{9, 11, 316}, - dictWord{139, 11, 256}, - dictWord{4, 0, 995}, - dictWord{135, 0, 1033}, - dictWord{132, 0, 578}, - dictWord{10, 0, 881}, - dictWord{12, 0, 740}, - dictWord{12, 0, 743}, - dictWord{140, 0, 759}, - dictWord{132, 0, 822}, - dictWord{133, 0, 923}, - dictWord{142, 10, 143}, - dictWord{135, 11, 1696}, - dictWord{6, 11, 363}, - dictWord{7, 11, 1955}, - dictWord{136, 11, 725}, - dictWord{132, 0, 924}, - dictWord{133, 0, 665}, - dictWord{ - 135, - 10, - 2029, - }, - dictWord{135, 0, 1901}, - dictWord{4, 0, 265}, - dictWord{6, 0, 1092}, - dictWord{6, 0, 1417}, - dictWord{7, 0, 807}, - dictWord{135, 0, 950}, - dictWord{ - 5, - 0, - 93, - }, - dictWord{12, 0, 267}, - dictWord{141, 0, 498}, - dictWord{135, 0, 1451}, - dictWord{5, 11, 813}, - dictWord{135, 11, 2046}, - dictWord{5, 10, 625}, - dictWord{135, 10, 1617}, - dictWord{135, 0, 747}, - dictWord{6, 0, 788}, - dictWord{137, 0, 828}, - dictWord{7, 0, 184}, - dictWord{11, 0, 307}, - dictWord{11, 0, 400}, - dictWord{15, 0, 130}, - dictWord{5, 11, 712}, - dictWord{7, 11, 1855}, - dictWord{8, 10, 425}, - dictWord{8, 10, 693}, - dictWord{9, 10, 720}, - dictWord{10, 10, 380}, - dictWord{10, 10, 638}, - dictWord{11, 11, 17}, - dictWord{11, 10, 473}, - dictWord{12, 10, 61}, - dictWord{13, 11, 321}, - dictWord{144, 11, 67}, - dictWord{135, 0, 198}, - dictWord{6, 
11, 320}, - dictWord{7, 11, 781}, - dictWord{7, 11, 1921}, - dictWord{9, 11, 55}, - dictWord{10, 11, 186}, - dictWord{10, 11, 273}, - dictWord{10, 11, 664}, - dictWord{10, 11, 801}, - dictWord{11, 11, 996}, - dictWord{11, 11, 997}, - dictWord{13, 11, 157}, - dictWord{142, 11, 170}, - dictWord{136, 11, 271}, - dictWord{ - 135, - 0, - 994, - }, - dictWord{7, 11, 103}, - dictWord{7, 11, 863}, - dictWord{11, 11, 184}, - dictWord{14, 11, 299}, - dictWord{145, 11, 62}, - dictWord{11, 10, 551}, - dictWord{142, 10, 159}, - dictWord{5, 0, 233}, - dictWord{5, 0, 320}, - dictWord{6, 0, 140}, - dictWord{8, 0, 295}, - dictWord{8, 0, 615}, - dictWord{136, 11, 615}, - dictWord{ - 133, - 0, - 978, - }, - dictWord{4, 0, 905}, - dictWord{6, 0, 1701}, - dictWord{137, 0, 843}, - dictWord{132, 10, 168}, - dictWord{4, 0, 974}, - dictWord{8, 0, 850}, - dictWord{ - 12, - 0, - 709, - }, - dictWord{12, 0, 768}, - dictWord{140, 0, 786}, - dictWord{135, 10, 91}, - dictWord{152, 0, 6}, - dictWord{138, 10, 532}, - dictWord{135, 10, 1884}, - dictWord{132, 0, 509}, - dictWord{6, 0, 1307}, - dictWord{135, 0, 273}, - dictWord{5, 11, 77}, - dictWord{7, 11, 1455}, - dictWord{10, 11, 843}, - dictWord{19, 11, 73}, - dictWord{150, 11, 5}, - dictWord{132, 11, 458}, - dictWord{135, 11, 1420}, - dictWord{6, 11, 109}, - dictWord{138, 11, 382}, - dictWord{6, 0, 201}, - dictWord{6, 11, 330}, - dictWord{7, 10, 70}, - dictWord{7, 11, 1084}, - dictWord{10, 10, 240}, - dictWord{11, 11, 142}, - dictWord{147, 10, 93}, - dictWord{7, 0, 1041}, - dictWord{ - 140, - 11, - 328, - }, - dictWord{133, 11, 354}, - dictWord{134, 0, 1040}, - dictWord{133, 0, 693}, - dictWord{134, 0, 774}, - dictWord{139, 0, 234}, - dictWord{132, 0, 336}, - dictWord{7, 0, 1399}, - dictWord{139, 10, 392}, - dictWord{20, 0, 22}, - dictWord{148, 11, 22}, - dictWord{5, 0, 802}, - dictWord{7, 0, 2021}, - dictWord{136, 0, 805}, - dictWord{ - 5, - 0, - 167, - }, - dictWord{5, 0, 899}, - dictWord{6, 0, 410}, - dictWord{137, 0, 777}, - dictWord{137, 0, 789}, - dictWord{134, 0, 1705}, - dictWord{7, 10, 655}, - dictWord{ - 135, - 10, - 1844, - }, - dictWord{4, 10, 145}, - dictWord{6, 10, 176}, - dictWord{7, 10, 395}, - dictWord{137, 10, 562}, - dictWord{132, 10, 501}, - dictWord{135, 0, 10}, - dictWord{5, 0, 11}, - dictWord{6, 0, 117}, - dictWord{6, 0, 485}, - dictWord{7, 0, 1133}, - dictWord{9, 0, 582}, - dictWord{9, 0, 594}, - dictWord{10, 0, 82}, - dictWord{11, 0, 21}, - dictWord{11, 0, 818}, - dictWord{12, 0, 535}, - dictWord{13, 0, 86}, - dictWord{20, 0, 91}, - dictWord{23, 0, 13}, - dictWord{134, 10, 509}, - dictWord{4, 0, 264}, - dictWord{ - 7, - 0, - 1067, - }, - dictWord{8, 0, 204}, - dictWord{8, 0, 385}, - dictWord{139, 0, 953}, - dictWord{139, 11, 737}, - dictWord{138, 0, 56}, - dictWord{134, 0, 1917}, - dictWord{ - 133, - 0, - 470, - }, - dictWord{10, 11, 657}, - dictWord{14, 11, 297}, - dictWord{142, 11, 361}, - dictWord{135, 11, 412}, - dictWord{7, 0, 1198}, - dictWord{7, 11, 1198}, - dictWord{8, 11, 556}, - dictWord{14, 11, 123}, - dictWord{14, 11, 192}, - dictWord{143, 11, 27}, - dictWord{7, 11, 1985}, - dictWord{14, 11, 146}, - dictWord{15, 11, 42}, - dictWord{16, 11, 23}, - dictWord{17, 11, 86}, - dictWord{146, 11, 17}, - dictWord{11, 0, 1015}, - dictWord{136, 11, 122}, - dictWord{4, 10, 114}, - dictWord{ - 9, - 10, - 492, - }, - dictWord{13, 10, 462}, - dictWord{142, 10, 215}, - dictWord{4, 10, 77}, - dictWord{5, 10, 361}, - dictWord{6, 10, 139}, - dictWord{6, 10, 401}, - dictWord{ - 6, - 10, - 404, - }, - dictWord{7, 10, 413}, - dictWord{7, 10, 715}, - 
dictWord{7, 10, 1716}, - dictWord{11, 10, 279}, - dictWord{12, 10, 179}, - dictWord{12, 10, 258}, - dictWord{ - 13, - 10, - 244, - }, - dictWord{142, 10, 358}, - dictWord{134, 10, 1717}, - dictWord{7, 10, 1061}, - dictWord{8, 10, 82}, - dictWord{11, 10, 250}, - dictWord{12, 10, 420}, - dictWord{141, 10, 184}, - dictWord{133, 0, 715}, - dictWord{135, 10, 724}, - dictWord{9, 0, 919}, - dictWord{9, 0, 922}, - dictWord{9, 0, 927}, - dictWord{9, 0, 933}, - dictWord{9, 0, 962}, - dictWord{9, 0, 1000}, - dictWord{9, 0, 1002}, - dictWord{9, 0, 1021}, - dictWord{12, 0, 890}, - dictWord{12, 0, 907}, - dictWord{12, 0, 930}, - dictWord{ - 15, - 0, - 207, - }, - dictWord{15, 0, 228}, - dictWord{15, 0, 238}, - dictWord{149, 0, 61}, - dictWord{8, 0, 794}, - dictWord{9, 0, 400}, - dictWord{10, 0, 298}, - dictWord{142, 0, 228}, - dictWord{5, 11, 430}, - dictWord{5, 11, 932}, - dictWord{6, 11, 131}, - dictWord{7, 11, 417}, - dictWord{9, 11, 522}, - dictWord{11, 11, 314}, - dictWord{141, 11, 390}, - dictWord{132, 0, 867}, - dictWord{8, 0, 724}, - dictWord{132, 11, 507}, - dictWord{137, 11, 261}, - dictWord{4, 11, 343}, - dictWord{133, 11, 511}, - dictWord{ - 6, - 0, - 190, - }, - dictWord{7, 0, 768}, - dictWord{135, 0, 1170}, - dictWord{6, 10, 513}, - dictWord{135, 10, 1052}, - dictWord{7, 11, 455}, - dictWord{138, 11, 591}, - dictWord{134, 0, 1066}, - dictWord{137, 10, 899}, - dictWord{14, 0, 67}, - dictWord{147, 0, 60}, - dictWord{4, 0, 948}, - dictWord{18, 0, 174}, - dictWord{146, 0, 176}, - dictWord{135, 0, 1023}, - dictWord{7, 10, 1417}, - dictWord{12, 10, 382}, - dictWord{17, 10, 48}, - dictWord{152, 10, 12}, - dictWord{134, 11, 575}, - dictWord{ - 132, - 0, - 764, - }, - dictWord{6, 10, 545}, - dictWord{7, 10, 565}, - dictWord{7, 10, 1669}, - dictWord{10, 10, 114}, - dictWord{11, 10, 642}, - dictWord{140, 10, 618}, - dictWord{ - 6, - 0, - 137, - }, - dictWord{9, 0, 75}, - dictWord{9, 0, 253}, - dictWord{10, 0, 194}, - dictWord{138, 0, 444}, - dictWord{4, 0, 756}, - dictWord{133, 10, 5}, - dictWord{8, 0, 1008}, - dictWord{135, 10, 192}, - dictWord{132, 0, 842}, - dictWord{11, 0, 643}, - dictWord{12, 0, 115}, - dictWord{136, 10, 763}, - dictWord{139, 0, 67}, - dictWord{ - 133, - 10, - 759, - }, - dictWord{4, 0, 821}, - dictWord{5, 0, 760}, - dictWord{7, 0, 542}, - dictWord{8, 0, 135}, - dictWord{8, 0, 496}, - dictWord{135, 11, 580}, - dictWord{7, 10, 370}, - dictWord{7, 10, 1007}, - dictWord{7, 10, 1177}, - dictWord{135, 10, 1565}, - dictWord{135, 10, 1237}, - dictWord{140, 0, 736}, - dictWord{7, 0, 319}, - dictWord{ - 7, - 0, - 355, - }, - dictWord{7, 0, 763}, - dictWord{10, 0, 389}, - dictWord{145, 0, 43}, - dictWord{8, 11, 333}, - dictWord{138, 11, 182}, - dictWord{4, 10, 87}, - dictWord{5, 10, 250}, - dictWord{141, 10, 298}, - dictWord{138, 0, 786}, - dictWord{134, 0, 2044}, - dictWord{8, 11, 330}, - dictWord{140, 11, 477}, - dictWord{135, 11, 1338}, - dictWord{132, 11, 125}, - dictWord{134, 0, 1030}, - dictWord{134, 0, 1083}, - dictWord{132, 11, 721}, - dictWord{135, 10, 814}, - dictWord{7, 11, 776}, - dictWord{ - 8, - 11, - 145, - }, - dictWord{147, 11, 56}, - dictWord{134, 0, 1226}, - dictWord{4, 10, 57}, - dictWord{7, 10, 1195}, - dictWord{7, 10, 1438}, - dictWord{7, 10, 1548}, - dictWord{ - 7, - 10, - 1835, - }, - dictWord{7, 10, 1904}, - dictWord{9, 10, 757}, - dictWord{10, 10, 604}, - dictWord{139, 10, 519}, - dictWord{7, 11, 792}, - dictWord{8, 11, 147}, - dictWord{10, 11, 821}, - dictWord{139, 11, 1021}, - dictWord{137, 11, 797}, - dictWord{4, 0, 58}, - dictWord{5, 0, 286}, - dictWord{6, 0, 
319}, - dictWord{7, 0, 402}, - dictWord{ - 7, - 0, - 1254, - }, - dictWord{7, 0, 1903}, - dictWord{8, 0, 356}, - dictWord{140, 0, 408}, - dictWord{4, 0, 389}, - dictWord{4, 0, 815}, - dictWord{9, 0, 181}, - dictWord{9, 0, 255}, - dictWord{10, 0, 8}, - dictWord{10, 0, 29}, - dictWord{10, 0, 816}, - dictWord{11, 0, 311}, - dictWord{11, 0, 561}, - dictWord{12, 0, 67}, - dictWord{141, 0, 181}, - dictWord{ - 7, - 11, - 1472, - }, - dictWord{135, 11, 1554}, - dictWord{7, 11, 1071}, - dictWord{7, 11, 1541}, - dictWord{7, 11, 1767}, - dictWord{7, 11, 1806}, - dictWord{7, 11, 1999}, - dictWord{9, 11, 248}, - dictWord{10, 11, 400}, - dictWord{11, 11, 162}, - dictWord{11, 11, 178}, - dictWord{11, 11, 242}, - dictWord{12, 11, 605}, - dictWord{ - 15, - 11, - 26, - }, - dictWord{144, 11, 44}, - dictWord{5, 11, 168}, - dictWord{5, 11, 930}, - dictWord{8, 11, 74}, - dictWord{9, 11, 623}, - dictWord{12, 11, 500}, - dictWord{ - 12, - 11, - 579, - }, - dictWord{13, 11, 41}, - dictWord{143, 11, 93}, - dictWord{6, 11, 220}, - dictWord{7, 11, 1101}, - dictWord{141, 11, 105}, - dictWord{5, 0, 474}, - dictWord{ - 7, - 0, - 507, - }, - dictWord{4, 10, 209}, - dictWord{7, 11, 507}, - dictWord{135, 10, 902}, - dictWord{132, 0, 427}, - dictWord{6, 0, 413}, - dictWord{7, 10, 335}, - dictWord{ - 7, - 10, - 1437, - }, - dictWord{7, 10, 1668}, - dictWord{8, 10, 553}, - dictWord{8, 10, 652}, - dictWord{8, 10, 656}, - dictWord{9, 10, 558}, - dictWord{11, 10, 743}, - dictWord{ - 149, - 10, - 18, - }, - dictWord{132, 0, 730}, - dictWord{6, 11, 19}, - dictWord{7, 11, 1413}, - dictWord{139, 11, 428}, - dictWord{133, 0, 373}, - dictWord{132, 10, 559}, - dictWord{7, 11, 96}, - dictWord{8, 11, 401}, - dictWord{137, 11, 896}, - dictWord{7, 0, 799}, - dictWord{7, 0, 1972}, - dictWord{5, 10, 1017}, - dictWord{138, 10, 511}, - dictWord{135, 0, 1793}, - dictWord{7, 11, 1961}, - dictWord{7, 11, 1965}, - dictWord{8, 11, 702}, - dictWord{136, 11, 750}, - dictWord{8, 11, 150}, - dictWord{8, 11, 737}, - dictWord{140, 11, 366}, - dictWord{132, 0, 322}, - dictWord{133, 10, 709}, - dictWord{8, 11, 800}, - dictWord{9, 11, 148}, - dictWord{9, 11, 872}, - dictWord{ - 9, - 11, - 890, - }, - dictWord{11, 11, 309}, - dictWord{11, 11, 1001}, - dictWord{13, 11, 267}, - dictWord{141, 11, 323}, - dictWord{134, 10, 1745}, - dictWord{7, 0, 290}, - dictWord{136, 10, 206}, - dictWord{7, 0, 1651}, - dictWord{145, 0, 89}, - dictWord{139, 0, 2}, - dictWord{132, 0, 672}, - dictWord{6, 0, 1860}, - dictWord{8, 0, 905}, - dictWord{ - 10, - 0, - 844, - }, - dictWord{10, 0, 846}, - dictWord{10, 0, 858}, - dictWord{12, 0, 699}, - dictWord{12, 0, 746}, - dictWord{140, 0, 772}, - dictWord{135, 11, 424}, - dictWord{133, 11, 547}, - dictWord{133, 0, 737}, - dictWord{5, 11, 490}, - dictWord{6, 11, 615}, - dictWord{6, 11, 620}, - dictWord{135, 11, 683}, - dictWord{6, 0, 746}, - dictWord{134, 0, 1612}, - dictWord{132, 10, 776}, - dictWord{9, 11, 385}, - dictWord{149, 11, 17}, - dictWord{133, 0, 145}, - dictWord{135, 10, 1272}, - dictWord{ - 7, - 0, - 884, - }, - dictWord{140, 0, 124}, - dictWord{4, 0, 387}, - dictWord{135, 0, 1288}, - dictWord{5, 11, 133}, - dictWord{136, 10, 406}, - dictWord{136, 11, 187}, - dictWord{ - 6, - 0, - 679, - }, - dictWord{8, 11, 8}, - dictWord{138, 11, 0}, - dictWord{135, 0, 550}, - dictWord{135, 11, 798}, - dictWord{136, 11, 685}, - dictWord{7, 11, 1086}, - dictWord{145, 11, 46}, - dictWord{8, 10, 175}, - dictWord{10, 10, 168}, - dictWord{138, 10, 573}, - dictWord{135, 0, 1305}, - dictWord{4, 0, 576}, - dictWord{ - 135, - 0, - 1263, - }, - 
-	[... remainder of this deleted vendored file omitted: thousands of auto-generated `dictWord{len, transform, idx}` entries forming a static compression-dictionary lookup table, removed along with the rest of the vendor directory ...]
11, 477}, - dictWord{5, 11, 596}, - dictWord{6, 11, 505}, - dictWord{7, 11, 1221}, - dictWord{11, 11, 907}, - dictWord{ - 12, - 11, - 209, - }, - dictWord{141, 11, 214}, - dictWord{135, 11, 1215}, - dictWord{5, 0, 402}, - dictWord{6, 10, 30}, - dictWord{11, 10, 56}, - dictWord{139, 10, 305}, - dictWord{ - 7, - 11, - 564, - }, - dictWord{142, 11, 168}, - dictWord{139, 0, 152}, - dictWord{7, 0, 912}, - dictWord{135, 10, 1614}, - dictWord{4, 10, 150}, - dictWord{5, 10, 303}, - dictWord{134, 10, 327}, - dictWord{7, 0, 320}, - dictWord{8, 0, 51}, - dictWord{9, 0, 868}, - dictWord{10, 0, 833}, - dictWord{12, 0, 481}, - dictWord{12, 0, 570}, - dictWord{ - 148, - 0, - 106, - }, - dictWord{132, 0, 445}, - dictWord{7, 11, 274}, - dictWord{11, 11, 263}, - dictWord{11, 11, 479}, - dictWord{11, 11, 507}, - dictWord{140, 11, 277}, - dictWord{10, 0, 555}, - dictWord{11, 0, 308}, - dictWord{19, 0, 95}, - dictWord{6, 11, 1645}, - dictWord{8, 10, 192}, - dictWord{10, 10, 78}, - dictWord{141, 10, 359}, - dictWord{135, 10, 786}, - dictWord{6, 11, 92}, - dictWord{6, 11, 188}, - dictWord{7, 11, 1269}, - dictWord{7, 11, 1524}, - dictWord{7, 11, 1876}, - dictWord{10, 11, 228}, - dictWord{139, 11, 1020}, - dictWord{4, 11, 459}, - dictWord{133, 11, 966}, - dictWord{11, 0, 386}, - dictWord{6, 10, 1638}, - dictWord{7, 10, 79}, - dictWord{ - 7, - 10, - 496, - }, - dictWord{9, 10, 138}, - dictWord{10, 10, 336}, - dictWord{12, 10, 412}, - dictWord{12, 10, 440}, - dictWord{142, 10, 305}, - dictWord{133, 0, 239}, - dictWord{ - 7, - 0, - 83, - }, - dictWord{7, 0, 1990}, - dictWord{8, 0, 130}, - dictWord{139, 0, 720}, - dictWord{138, 11, 709}, - dictWord{4, 0, 143}, - dictWord{5, 0, 550}, - dictWord{ - 133, - 0, - 752, - }, - dictWord{5, 0, 123}, - dictWord{6, 0, 530}, - dictWord{7, 0, 348}, - dictWord{135, 0, 1419}, - dictWord{135, 0, 2024}, - dictWord{6, 11, 18}, - dictWord{7, 11, 179}, - dictWord{7, 11, 721}, - dictWord{7, 11, 932}, - dictWord{8, 11, 548}, - dictWord{8, 11, 757}, - dictWord{9, 11, 54}, - dictWord{9, 11, 65}, - dictWord{9, 11, 532}, - dictWord{ - 9, - 11, - 844, - }, - dictWord{10, 11, 113}, - dictWord{10, 11, 117}, - dictWord{10, 11, 236}, - dictWord{10, 11, 315}, - dictWord{10, 11, 430}, - dictWord{10, 11, 798}, - dictWord{11, 11, 153}, - dictWord{11, 11, 351}, - dictWord{11, 11, 375}, - dictWord{12, 11, 78}, - dictWord{12, 11, 151}, - dictWord{12, 11, 392}, - dictWord{ - 14, - 11, - 248, - }, - dictWord{143, 11, 23}, - dictWord{7, 10, 204}, - dictWord{7, 10, 415}, - dictWord{8, 10, 42}, - dictWord{10, 10, 85}, - dictWord{139, 10, 564}, - dictWord{ - 134, - 0, - 958, - }, - dictWord{133, 11, 965}, - dictWord{132, 0, 210}, - dictWord{135, 11, 1429}, - dictWord{138, 11, 480}, - dictWord{134, 11, 182}, - dictWord{ - 139, - 11, - 345, - }, - dictWord{10, 11, 65}, - dictWord{10, 11, 488}, - dictWord{138, 11, 497}, - dictWord{4, 10, 3}, - dictWord{5, 10, 247}, - dictWord{5, 10, 644}, - dictWord{ - 7, - 10, - 744, - }, - dictWord{7, 10, 1207}, - dictWord{7, 10, 1225}, - dictWord{7, 10, 1909}, - dictWord{146, 10, 147}, - dictWord{132, 0, 430}, - dictWord{5, 10, 285}, - dictWord{ - 9, - 10, - 67, - }, - dictWord{13, 10, 473}, - dictWord{143, 10, 82}, - dictWord{144, 11, 16}, - dictWord{7, 11, 1162}, - dictWord{9, 11, 588}, - dictWord{10, 11, 260}, - dictWord{151, 10, 8}, - dictWord{133, 0, 213}, - dictWord{138, 0, 7}, - dictWord{135, 0, 801}, - dictWord{134, 11, 1786}, - dictWord{135, 11, 308}, - dictWord{6, 0, 936}, - dictWord{134, 0, 1289}, - dictWord{133, 0, 108}, - dictWord{132, 0, 885}, - dictWord{133, 0, 
219}, - dictWord{139, 0, 587}, - dictWord{4, 0, 193}, - dictWord{5, 0, 916}, - dictWord{6, 0, 1041}, - dictWord{7, 0, 364}, - dictWord{10, 0, 398}, - dictWord{10, 0, 726}, - dictWord{11, 0, 317}, - dictWord{11, 0, 626}, - dictWord{12, 0, 142}, - dictWord{12, 0, 288}, - dictWord{12, 0, 678}, - dictWord{13, 0, 313}, - dictWord{15, 0, 113}, - dictWord{146, 0, 114}, - dictWord{135, 0, 1165}, - dictWord{6, 0, 241}, - dictWord{ - 9, - 0, - 342, - }, - dictWord{10, 0, 729}, - dictWord{11, 0, 284}, - dictWord{11, 0, 445}, - dictWord{11, 0, 651}, - dictWord{11, 0, 863}, - dictWord{13, 0, 398}, - dictWord{ - 146, - 0, - 99, - }, - dictWord{7, 0, 907}, - dictWord{136, 0, 832}, - dictWord{9, 0, 303}, - dictWord{4, 10, 29}, - dictWord{6, 10, 532}, - dictWord{7, 10, 1628}, - dictWord{7, 10, 1648}, - dictWord{9, 10, 350}, - dictWord{10, 10, 433}, - dictWord{11, 10, 97}, - dictWord{11, 10, 557}, - dictWord{11, 10, 745}, - dictWord{12, 10, 289}, - dictWord{ - 12, - 10, - 335, - }, - dictWord{12, 10, 348}, - dictWord{12, 10, 606}, - dictWord{13, 10, 116}, - dictWord{13, 10, 233}, - dictWord{13, 10, 466}, - dictWord{14, 10, 181}, - dictWord{ - 14, - 10, - 209, - }, - dictWord{14, 10, 232}, - dictWord{14, 10, 236}, - dictWord{14, 10, 300}, - dictWord{16, 10, 41}, - dictWord{148, 10, 97}, - dictWord{7, 11, 423}, - dictWord{7, 10, 1692}, - dictWord{136, 11, 588}, - dictWord{6, 0, 931}, - dictWord{134, 0, 1454}, - dictWord{5, 10, 501}, - dictWord{7, 10, 1704}, - dictWord{9, 10, 553}, - dictWord{11, 10, 520}, - dictWord{12, 10, 557}, - dictWord{141, 10, 249}, - dictWord{136, 11, 287}, - dictWord{4, 0, 562}, - dictWord{9, 0, 254}, - dictWord{ - 139, - 0, - 879, - }, - dictWord{132, 0, 786}, - dictWord{14, 11, 32}, - dictWord{18, 11, 85}, - dictWord{20, 11, 2}, - dictWord{152, 11, 16}, - dictWord{135, 0, 1294}, - dictWord{ - 7, - 11, - 723, - }, - dictWord{135, 11, 1135}, - dictWord{6, 0, 216}, - dictWord{7, 0, 901}, - dictWord{7, 0, 1343}, - dictWord{8, 0, 493}, - dictWord{134, 11, 403}, - dictWord{ - 7, - 11, - 719, - }, - dictWord{8, 11, 809}, - dictWord{136, 11, 834}, - dictWord{5, 11, 210}, - dictWord{6, 11, 213}, - dictWord{7, 11, 60}, - dictWord{10, 11, 364}, - dictWord{ - 139, - 11, - 135, - }, - dictWord{7, 0, 341}, - dictWord{11, 0, 219}, - dictWord{5, 11, 607}, - dictWord{8, 11, 326}, - dictWord{136, 11, 490}, - dictWord{4, 11, 701}, - dictWord{ - 5, - 11, - 472, - }, - dictWord{5, 11, 639}, - dictWord{7, 11, 1249}, - dictWord{9, 11, 758}, - dictWord{139, 11, 896}, - dictWord{135, 11, 380}, - dictWord{135, 11, 1947}, - dictWord{139, 0, 130}, - dictWord{135, 0, 1734}, - dictWord{10, 0, 115}, - dictWord{11, 0, 420}, - dictWord{12, 0, 154}, - dictWord{13, 0, 404}, - dictWord{14, 0, 346}, - dictWord{143, 0, 54}, - dictWord{134, 10, 129}, - dictWord{4, 11, 386}, - dictWord{7, 11, 41}, - dictWord{8, 11, 405}, - dictWord{9, 11, 497}, - dictWord{11, 11, 110}, - dictWord{11, 11, 360}, - dictWord{15, 11, 37}, - dictWord{144, 11, 84}, - dictWord{141, 11, 282}, - dictWord{5, 11, 46}, - dictWord{7, 11, 1452}, - dictWord{7, 11, 1480}, - dictWord{8, 11, 634}, - dictWord{140, 11, 472}, - dictWord{4, 11, 524}, - dictWord{136, 11, 810}, - dictWord{10, 11, 238}, - dictWord{141, 11, 33}, - dictWord{ - 133, - 0, - 604, - }, - dictWord{5, 0, 1011}, - dictWord{136, 0, 701}, - dictWord{8, 0, 856}, - dictWord{8, 0, 858}, - dictWord{8, 0, 879}, - dictWord{12, 0, 702}, - dictWord{142, 0, 447}, - dictWord{4, 0, 54}, - dictWord{5, 0, 666}, - dictWord{7, 0, 1039}, - dictWord{7, 0, 1130}, - dictWord{9, 0, 195}, - dictWord{138, 0, 
302}, - dictWord{4, 10, 25}, - dictWord{ - 5, - 10, - 60, - }, - dictWord{6, 10, 504}, - dictWord{7, 10, 614}, - dictWord{7, 10, 1155}, - dictWord{140, 10, 0}, - dictWord{7, 10, 1248}, - dictWord{11, 10, 621}, - dictWord{ - 139, - 10, - 702, - }, - dictWord{133, 11, 997}, - dictWord{137, 10, 321}, - dictWord{134, 0, 1669}, - dictWord{134, 0, 1791}, - dictWord{4, 10, 379}, - dictWord{ - 135, - 10, - 1397, - }, - dictWord{138, 11, 372}, - dictWord{5, 11, 782}, - dictWord{5, 11, 829}, - dictWord{134, 11, 1738}, - dictWord{135, 0, 1228}, - dictWord{4, 10, 118}, - dictWord{6, 10, 274}, - dictWord{6, 10, 361}, - dictWord{7, 10, 75}, - dictWord{141, 10, 441}, - dictWord{132, 0, 623}, - dictWord{9, 11, 279}, - dictWord{10, 11, 407}, - dictWord{14, 11, 84}, - dictWord{150, 11, 18}, - dictWord{137, 10, 841}, - dictWord{135, 0, 798}, - dictWord{140, 10, 693}, - dictWord{5, 10, 314}, - dictWord{6, 10, 221}, - dictWord{7, 10, 419}, - dictWord{10, 10, 650}, - dictWord{11, 10, 396}, - dictWord{12, 10, 156}, - dictWord{13, 10, 369}, - dictWord{14, 10, 333}, - dictWord{ - 145, - 10, - 47, - }, - dictWord{135, 11, 1372}, - dictWord{7, 0, 122}, - dictWord{9, 0, 259}, - dictWord{10, 0, 84}, - dictWord{11, 0, 470}, - dictWord{12, 0, 541}, - dictWord{ - 141, - 0, - 379, - }, - dictWord{134, 0, 837}, - dictWord{8, 0, 1013}, - dictWord{4, 11, 78}, - dictWord{5, 11, 96}, - dictWord{5, 11, 182}, - dictWord{7, 11, 1724}, - dictWord{ - 7, - 11, - 1825, - }, - dictWord{10, 11, 394}, - dictWord{10, 11, 471}, - dictWord{11, 11, 532}, - dictWord{14, 11, 340}, - dictWord{145, 11, 88}, - dictWord{134, 0, 577}, - dictWord{135, 11, 1964}, - dictWord{132, 10, 913}, - dictWord{134, 0, 460}, - dictWord{8, 0, 891}, - dictWord{10, 0, 901}, - dictWord{10, 0, 919}, - dictWord{10, 0, 932}, - dictWord{12, 0, 715}, - dictWord{12, 0, 728}, - dictWord{12, 0, 777}, - dictWord{14, 0, 457}, - dictWord{144, 0, 103}, - dictWord{5, 0, 82}, - dictWord{5, 0, 131}, - dictWord{ - 7, - 0, - 1755, - }, - dictWord{8, 0, 31}, - dictWord{9, 0, 168}, - dictWord{9, 0, 764}, - dictWord{139, 0, 869}, - dictWord{136, 10, 475}, - dictWord{6, 0, 605}, - dictWord{ - 5, - 10, - 1016, - }, - dictWord{9, 11, 601}, - dictWord{9, 11, 619}, - dictWord{10, 11, 505}, - dictWord{10, 11, 732}, - dictWord{11, 11, 355}, - dictWord{140, 11, 139}, - dictWord{ - 7, - 10, - 602, - }, - dictWord{8, 10, 179}, - dictWord{10, 10, 781}, - dictWord{140, 10, 126}, - dictWord{134, 0, 1246}, - dictWord{6, 10, 329}, - dictWord{138, 10, 111}, - dictWord{6, 11, 215}, - dictWord{7, 11, 1028}, - dictWord{7, 11, 1473}, - dictWord{7, 11, 1721}, - dictWord{9, 11, 424}, - dictWord{138, 11, 779}, - dictWord{5, 0, 278}, - dictWord{137, 0, 68}, - dictWord{6, 0, 932}, - dictWord{6, 0, 1084}, - dictWord{144, 0, 86}, - dictWord{4, 0, 163}, - dictWord{5, 0, 201}, - dictWord{5, 0, 307}, - dictWord{ - 5, - 0, - 310, - }, - dictWord{6, 0, 335}, - dictWord{7, 0, 284}, - dictWord{7, 0, 1660}, - dictWord{136, 0, 165}, - dictWord{136, 0, 781}, - dictWord{134, 0, 707}, - dictWord{6, 0, 33}, - dictWord{135, 0, 1244}, - dictWord{5, 10, 821}, - dictWord{6, 11, 67}, - dictWord{6, 10, 1687}, - dictWord{7, 11, 258}, - dictWord{7, 11, 1630}, - dictWord{9, 11, 354}, - dictWord{9, 11, 675}, - dictWord{10, 11, 830}, - dictWord{14, 11, 80}, - dictWord{145, 11, 80}, - dictWord{6, 11, 141}, - dictWord{7, 11, 225}, - dictWord{9, 11, 59}, - dictWord{9, 11, 607}, - dictWord{10, 11, 312}, - dictWord{11, 11, 687}, - dictWord{12, 11, 555}, - dictWord{13, 11, 373}, - dictWord{13, 11, 494}, - dictWord{148, 11, 58}, - 
dictWord{134, 0, 1113}, - dictWord{9, 0, 388}, - dictWord{5, 10, 71}, - dictWord{7, 10, 1407}, - dictWord{9, 10, 704}, - dictWord{10, 10, 261}, - dictWord{10, 10, 619}, - dictWord{11, 10, 547}, - dictWord{11, 10, 619}, - dictWord{143, 10, 157}, - dictWord{7, 0, 1953}, - dictWord{136, 0, 720}, - dictWord{138, 0, 203}, - dictWord{ - 7, - 10, - 2008, - }, - dictWord{9, 10, 337}, - dictWord{138, 10, 517}, - dictWord{6, 0, 326}, - dictWord{7, 0, 677}, - dictWord{137, 0, 425}, - dictWord{139, 11, 81}, - dictWord{ - 7, - 0, - 1316, - }, - dictWord{7, 0, 1412}, - dictWord{7, 0, 1839}, - dictWord{9, 0, 589}, - dictWord{11, 0, 241}, - dictWord{11, 0, 676}, - dictWord{11, 0, 811}, - dictWord{11, 0, 891}, - dictWord{12, 0, 140}, - dictWord{12, 0, 346}, - dictWord{12, 0, 479}, - dictWord{13, 0, 140}, - dictWord{13, 0, 381}, - dictWord{14, 0, 188}, - dictWord{18, 0, 30}, - dictWord{148, 0, 108}, - dictWord{5, 0, 416}, - dictWord{6, 10, 86}, - dictWord{6, 10, 603}, - dictWord{7, 10, 292}, - dictWord{7, 10, 561}, - dictWord{8, 10, 257}, - dictWord{ - 8, - 10, - 382, - }, - dictWord{9, 10, 721}, - dictWord{9, 10, 778}, - dictWord{11, 10, 581}, - dictWord{140, 10, 466}, - dictWord{4, 10, 486}, - dictWord{133, 10, 491}, - dictWord{134, 0, 1300}, - dictWord{132, 10, 72}, - dictWord{7, 0, 847}, - dictWord{6, 10, 265}, - dictWord{7, 11, 430}, - dictWord{139, 11, 46}, - dictWord{5, 11, 602}, - dictWord{6, 11, 106}, - dictWord{7, 11, 1786}, - dictWord{7, 11, 1821}, - dictWord{7, 11, 2018}, - dictWord{9, 11, 418}, - dictWord{137, 11, 763}, - dictWord{5, 0, 358}, - dictWord{7, 0, 535}, - dictWord{7, 0, 1184}, - dictWord{10, 0, 662}, - dictWord{13, 0, 212}, - dictWord{13, 0, 304}, - dictWord{13, 0, 333}, - dictWord{145, 0, 98}, - dictWord{ - 5, - 11, - 65, - }, - dictWord{6, 11, 416}, - dictWord{7, 11, 1720}, - dictWord{7, 11, 1924}, - dictWord{8, 11, 677}, - dictWord{10, 11, 109}, - dictWord{11, 11, 14}, - dictWord{ - 11, - 11, - 70, - }, - dictWord{11, 11, 569}, - dictWord{11, 11, 735}, - dictWord{15, 11, 153}, - dictWord{148, 11, 80}, - dictWord{6, 0, 1823}, - dictWord{8, 0, 839}, - dictWord{ - 8, - 0, - 852, - }, - dictWord{8, 0, 903}, - dictWord{10, 0, 940}, - dictWord{12, 0, 707}, - dictWord{140, 0, 775}, - dictWord{135, 11, 1229}, - dictWord{6, 0, 1522}, - dictWord{ - 140, - 0, - 654, - }, - dictWord{136, 11, 595}, - dictWord{139, 0, 163}, - dictWord{141, 0, 314}, - dictWord{132, 0, 978}, - dictWord{4, 0, 601}, - dictWord{6, 0, 2035}, - dictWord{137, 10, 234}, - dictWord{5, 10, 815}, - dictWord{6, 10, 1688}, - dictWord{134, 10, 1755}, - dictWord{133, 0, 946}, - dictWord{136, 0, 434}, - dictWord{ - 6, - 10, - 197, - }, - dictWord{136, 10, 205}, - dictWord{7, 0, 411}, - dictWord{7, 0, 590}, - dictWord{8, 0, 631}, - dictWord{9, 0, 323}, - dictWord{10, 0, 355}, - dictWord{11, 0, 491}, - dictWord{12, 0, 143}, - dictWord{12, 0, 402}, - dictWord{13, 0, 73}, - dictWord{14, 0, 408}, - dictWord{15, 0, 107}, - dictWord{146, 0, 71}, - dictWord{7, 0, 1467}, - dictWord{ - 8, - 0, - 328, - }, - dictWord{10, 0, 544}, - dictWord{11, 0, 955}, - dictWord{12, 0, 13}, - dictWord{13, 0, 320}, - dictWord{145, 0, 83}, - dictWord{142, 0, 410}, - dictWord{ - 11, - 0, - 511, - }, - dictWord{13, 0, 394}, - dictWord{14, 0, 298}, - dictWord{14, 0, 318}, - dictWord{146, 0, 103}, - dictWord{6, 10, 452}, - dictWord{7, 10, 312}, - dictWord{ - 138, - 10, - 219, - }, - dictWord{138, 10, 589}, - dictWord{4, 10, 333}, - dictWord{9, 10, 176}, - dictWord{12, 10, 353}, - dictWord{141, 10, 187}, - dictWord{135, 11, 329}, - dictWord{132, 11, 469}, - 
dictWord{5, 0, 835}, - dictWord{134, 0, 483}, - dictWord{134, 11, 1743}, - dictWord{5, 11, 929}, - dictWord{6, 11, 340}, - dictWord{8, 11, 376}, - dictWord{136, 11, 807}, - dictWord{134, 10, 1685}, - dictWord{132, 0, 677}, - dictWord{5, 11, 218}, - dictWord{7, 11, 1610}, - dictWord{138, 11, 83}, - dictWord{ - 5, - 11, - 571, - }, - dictWord{135, 11, 1842}, - dictWord{132, 11, 455}, - dictWord{137, 0, 70}, - dictWord{135, 0, 1405}, - dictWord{7, 10, 135}, - dictWord{8, 10, 7}, - dictWord{ - 8, - 10, - 62, - }, - dictWord{9, 10, 243}, - dictWord{10, 10, 658}, - dictWord{10, 10, 697}, - dictWord{11, 10, 456}, - dictWord{139, 10, 756}, - dictWord{9, 10, 395}, - dictWord{138, 10, 79}, - dictWord{137, 0, 108}, - dictWord{6, 11, 161}, - dictWord{7, 11, 372}, - dictWord{137, 11, 597}, - dictWord{132, 11, 349}, - dictWord{ - 132, - 0, - 777, - }, - dictWord{132, 0, 331}, - dictWord{135, 10, 631}, - dictWord{133, 0, 747}, - dictWord{6, 11, 432}, - dictWord{6, 11, 608}, - dictWord{139, 11, 322}, - dictWord{138, 10, 835}, - dictWord{5, 11, 468}, - dictWord{7, 11, 1809}, - dictWord{10, 11, 325}, - dictWord{11, 11, 856}, - dictWord{12, 11, 345}, - dictWord{ - 143, - 11, - 104, - }, - dictWord{133, 11, 223}, - dictWord{7, 10, 406}, - dictWord{7, 10, 459}, - dictWord{8, 10, 606}, - dictWord{139, 10, 726}, - dictWord{132, 11, 566}, - dictWord{142, 0, 68}, - dictWord{4, 11, 59}, - dictWord{135, 11, 1394}, - dictWord{6, 11, 436}, - dictWord{139, 11, 481}, - dictWord{4, 11, 48}, - dictWord{5, 11, 271}, - dictWord{135, 11, 953}, - dictWord{139, 11, 170}, - dictWord{5, 11, 610}, - dictWord{136, 11, 457}, - dictWord{133, 11, 755}, - dictWord{135, 11, 1217}, - dictWord{ - 133, - 10, - 612, - }, - dictWord{132, 11, 197}, - dictWord{132, 0, 505}, - dictWord{4, 10, 372}, - dictWord{7, 10, 482}, - dictWord{8, 10, 158}, - dictWord{9, 10, 602}, - dictWord{ - 9, - 10, - 615, - }, - dictWord{10, 10, 245}, - dictWord{10, 10, 678}, - dictWord{10, 10, 744}, - dictWord{11, 10, 248}, - dictWord{139, 10, 806}, - dictWord{133, 0, 326}, - dictWord{5, 10, 854}, - dictWord{135, 10, 1991}, - dictWord{4, 0, 691}, - dictWord{146, 0, 16}, - dictWord{6, 0, 628}, - dictWord{9, 0, 35}, - dictWord{10, 0, 680}, - dictWord{10, 0, 793}, - dictWord{11, 0, 364}, - dictWord{13, 0, 357}, - dictWord{143, 0, 164}, - dictWord{138, 0, 654}, - dictWord{6, 0, 32}, - dictWord{7, 0, 385}, - dictWord{ - 7, - 0, - 757, - }, - dictWord{7, 0, 1916}, - dictWord{8, 0, 37}, - dictWord{8, 0, 94}, - dictWord{8, 0, 711}, - dictWord{9, 0, 541}, - dictWord{10, 0, 162}, - dictWord{10, 0, 795}, - dictWord{ - 11, - 0, - 989, - }, - dictWord{11, 0, 1010}, - dictWord{12, 0, 14}, - dictWord{142, 0, 308}, - dictWord{133, 11, 217}, - dictWord{6, 0, 152}, - dictWord{6, 0, 349}, - dictWord{ - 6, - 0, - 1682, - }, - dictWord{7, 0, 1252}, - dictWord{8, 0, 112}, - dictWord{9, 0, 435}, - dictWord{9, 0, 668}, - dictWord{10, 0, 290}, - dictWord{10, 0, 319}, - dictWord{10, 0, 815}, - dictWord{11, 0, 180}, - dictWord{11, 0, 837}, - dictWord{12, 0, 240}, - dictWord{13, 0, 152}, - dictWord{13, 0, 219}, - dictWord{142, 0, 158}, - dictWord{4, 0, 581}, - dictWord{134, 0, 726}, - dictWord{5, 10, 195}, - dictWord{135, 10, 1685}, - dictWord{6, 0, 126}, - dictWord{7, 0, 573}, - dictWord{8, 0, 397}, - dictWord{142, 0, 44}, - dictWord{138, 0, 89}, - dictWord{7, 10, 1997}, - dictWord{8, 10, 730}, - dictWord{139, 10, 1006}, - dictWord{134, 0, 1531}, - dictWord{134, 0, 1167}, - dictWord{ - 5, - 0, - 926, - }, - dictWord{12, 0, 203}, - dictWord{133, 10, 751}, - dictWord{4, 11, 165}, - dictWord{7, 
11, 1398}, - dictWord{135, 11, 1829}, - dictWord{7, 0, 1232}, - dictWord{137, 0, 531}, - dictWord{135, 10, 821}, - dictWord{134, 0, 943}, - dictWord{133, 0, 670}, - dictWord{4, 0, 880}, - dictWord{139, 0, 231}, - dictWord{ - 134, - 0, - 1617, - }, - dictWord{135, 0, 1957}, - dictWord{5, 11, 9}, - dictWord{7, 11, 297}, - dictWord{7, 11, 966}, - dictWord{140, 11, 306}, - dictWord{6, 0, 975}, - dictWord{ - 134, - 0, - 985, - }, - dictWord{5, 10, 950}, - dictWord{5, 10, 994}, - dictWord{134, 10, 351}, - dictWord{12, 11, 21}, - dictWord{151, 11, 7}, - dictWord{5, 11, 146}, - dictWord{ - 6, - 11, - 411, - }, - dictWord{138, 11, 721}, - dictWord{7, 0, 242}, - dictWord{135, 0, 1942}, - dictWord{6, 11, 177}, - dictWord{135, 11, 467}, - dictWord{5, 0, 421}, - dictWord{ - 7, - 10, - 47, - }, - dictWord{137, 10, 684}, - dictWord{5, 0, 834}, - dictWord{7, 0, 1202}, - dictWord{8, 0, 14}, - dictWord{9, 0, 481}, - dictWord{137, 0, 880}, - dictWord{138, 0, 465}, - dictWord{6, 0, 688}, - dictWord{9, 0, 834}, - dictWord{132, 10, 350}, - dictWord{132, 0, 855}, - dictWord{4, 0, 357}, - dictWord{6, 0, 172}, - dictWord{7, 0, 143}, - dictWord{137, 0, 413}, - dictWord{133, 11, 200}, - dictWord{132, 0, 590}, - dictWord{7, 10, 1812}, - dictWord{13, 10, 259}, - dictWord{13, 10, 356}, - dictWord{ - 14, - 10, - 242, - }, - dictWord{147, 10, 114}, - dictWord{133, 10, 967}, - dictWord{11, 0, 114}, - dictWord{4, 10, 473}, - dictWord{7, 10, 623}, - dictWord{8, 10, 808}, - dictWord{ - 9, - 10, - 871, - }, - dictWord{9, 10, 893}, - dictWord{11, 10, 431}, - dictWord{12, 10, 112}, - dictWord{12, 10, 217}, - dictWord{12, 10, 243}, - dictWord{12, 10, 562}, - dictWord{ - 12, - 10, - 663, - }, - dictWord{12, 10, 683}, - dictWord{13, 10, 141}, - dictWord{13, 10, 197}, - dictWord{13, 10, 227}, - dictWord{13, 10, 406}, - dictWord{13, 10, 487}, - dictWord{14, 10, 156}, - dictWord{14, 10, 203}, - dictWord{14, 10, 224}, - dictWord{14, 10, 256}, - dictWord{18, 10, 58}, - dictWord{150, 10, 0}, - dictWord{ - 138, - 10, - 286, - }, - dictWord{4, 10, 222}, - dictWord{7, 10, 286}, - dictWord{136, 10, 629}, - dictWord{5, 0, 169}, - dictWord{7, 0, 333}, - dictWord{136, 0, 45}, - dictWord{ - 134, - 11, - 481, - }, - dictWord{132, 0, 198}, - dictWord{4, 0, 24}, - dictWord{5, 0, 140}, - dictWord{5, 0, 185}, - dictWord{7, 0, 1500}, - dictWord{11, 0, 565}, - dictWord{11, 0, 838}, - dictWord{4, 11, 84}, - dictWord{7, 11, 1482}, - dictWord{10, 11, 76}, - dictWord{138, 11, 142}, - dictWord{133, 0, 585}, - dictWord{141, 10, 306}, - dictWord{ - 133, - 11, - 1015, - }, - dictWord{4, 11, 315}, - dictWord{5, 11, 507}, - dictWord{135, 11, 1370}, - dictWord{136, 10, 146}, - dictWord{6, 0, 691}, - dictWord{134, 0, 1503}, - dictWord{ - 4, - 0, - 334, - }, - dictWord{133, 0, 593}, - dictWord{4, 10, 465}, - dictWord{135, 10, 1663}, - dictWord{142, 11, 173}, - dictWord{135, 0, 913}, - dictWord{12, 0, 116}, - dictWord{134, 11, 1722}, - dictWord{134, 0, 1360}, - dictWord{132, 0, 802}, - dictWord{8, 11, 222}, - dictWord{8, 11, 476}, - dictWord{9, 11, 238}, - dictWord{ - 11, - 11, - 516, - }, - dictWord{11, 11, 575}, - dictWord{15, 11, 109}, - dictWord{146, 11, 100}, - dictWord{6, 0, 308}, - dictWord{9, 0, 673}, - dictWord{7, 10, 138}, - dictWord{ - 7, - 10, - 517, - }, - dictWord{139, 10, 238}, - dictWord{132, 0, 709}, - dictWord{6, 0, 1876}, - dictWord{6, 0, 1895}, - dictWord{9, 0, 994}, - dictWord{9, 0, 1006}, - dictWord{ - 12, - 0, - 829, - }, - dictWord{12, 0, 888}, - dictWord{12, 0, 891}, - dictWord{146, 0, 185}, - dictWord{148, 10, 94}, - dictWord{4, 0, 228}, - 
dictWord{133, 0, 897}, - dictWord{ - 7, - 0, - 1840, - }, - dictWord{5, 10, 495}, - dictWord{7, 10, 834}, - dictWord{9, 10, 733}, - dictWord{139, 10, 378}, - dictWord{133, 10, 559}, - dictWord{6, 10, 21}, - dictWord{ - 6, - 10, - 1737, - }, - dictWord{7, 10, 1444}, - dictWord{136, 10, 224}, - dictWord{4, 0, 608}, - dictWord{133, 0, 497}, - dictWord{6, 11, 40}, - dictWord{135, 11, 1781}, - dictWord{134, 0, 1573}, - dictWord{135, 0, 2039}, - dictWord{6, 0, 540}, - dictWord{136, 0, 136}, - dictWord{4, 0, 897}, - dictWord{5, 0, 786}, - dictWord{133, 10, 519}, - dictWord{6, 0, 1878}, - dictWord{6, 0, 1884}, - dictWord{9, 0, 938}, - dictWord{9, 0, 948}, - dictWord{9, 0, 955}, - dictWord{9, 0, 973}, - dictWord{9, 0, 1012}, - dictWord{ - 12, - 0, - 895, - }, - dictWord{12, 0, 927}, - dictWord{143, 0, 254}, - dictWord{134, 0, 1469}, - dictWord{133, 0, 999}, - dictWord{4, 0, 299}, - dictWord{135, 0, 1004}, - dictWord{ - 4, - 0, - 745, - }, - dictWord{133, 0, 578}, - dictWord{136, 11, 574}, - dictWord{133, 0, 456}, - dictWord{134, 0, 1457}, - dictWord{7, 0, 1679}, - dictWord{132, 10, 402}, - dictWord{7, 0, 693}, - dictWord{8, 0, 180}, - dictWord{12, 0, 163}, - dictWord{8, 10, 323}, - dictWord{136, 10, 479}, - dictWord{11, 10, 580}, - dictWord{142, 10, 201}, - dictWord{5, 10, 59}, - dictWord{135, 10, 672}, - dictWord{132, 11, 354}, - dictWord{146, 10, 34}, - dictWord{4, 0, 755}, - dictWord{135, 11, 1558}, - dictWord{ - 7, - 0, - 1740, - }, - dictWord{146, 0, 48}, - dictWord{4, 10, 85}, - dictWord{135, 10, 549}, - dictWord{139, 0, 338}, - dictWord{133, 10, 94}, - dictWord{134, 0, 1091}, - dictWord{135, 11, 469}, - dictWord{12, 0, 695}, - dictWord{12, 0, 704}, - dictWord{20, 0, 113}, - dictWord{5, 11, 830}, - dictWord{14, 11, 338}, - dictWord{148, 11, 81}, - dictWord{135, 0, 1464}, - dictWord{6, 10, 11}, - dictWord{135, 10, 187}, - dictWord{135, 0, 975}, - dictWord{13, 0, 335}, - dictWord{132, 10, 522}, - dictWord{ - 134, - 0, - 1979, - }, - dictWord{5, 11, 496}, - dictWord{135, 11, 203}, - dictWord{4, 10, 52}, - dictWord{135, 10, 661}, - dictWord{7, 0, 1566}, - dictWord{8, 0, 269}, - dictWord{ - 9, - 0, - 212, - }, - dictWord{9, 0, 718}, - dictWord{14, 0, 15}, - dictWord{14, 0, 132}, - dictWord{142, 0, 227}, - dictWord{4, 0, 890}, - dictWord{5, 0, 805}, - dictWord{5, 0, 819}, - dictWord{ - 5, - 0, - 961, - }, - dictWord{6, 0, 396}, - dictWord{6, 0, 1631}, - dictWord{6, 0, 1678}, - dictWord{7, 0, 1967}, - dictWord{7, 0, 2041}, - dictWord{9, 0, 630}, - dictWord{11, 0, 8}, - dictWord{11, 0, 1019}, - dictWord{12, 0, 176}, - dictWord{13, 0, 225}, - dictWord{14, 0, 292}, - dictWord{21, 0, 24}, - dictWord{4, 10, 383}, - dictWord{133, 10, 520}, - dictWord{134, 11, 547}, - dictWord{135, 11, 1748}, - dictWord{5, 11, 88}, - dictWord{137, 11, 239}, - dictWord{146, 11, 128}, - dictWord{7, 11, 650}, - dictWord{ - 135, - 11, - 1310, - }, - dictWord{4, 10, 281}, - dictWord{5, 10, 38}, - dictWord{7, 10, 194}, - dictWord{7, 10, 668}, - dictWord{7, 10, 1893}, - dictWord{137, 10, 397}, - dictWord{135, 0, 1815}, - dictWord{9, 10, 635}, - dictWord{139, 10, 559}, - dictWord{7, 0, 1505}, - dictWord{10, 0, 190}, - dictWord{10, 0, 634}, - dictWord{11, 0, 792}, - dictWord{12, 0, 358}, - dictWord{140, 0, 447}, - dictWord{5, 0, 0}, - dictWord{6, 0, 536}, - dictWord{7, 0, 604}, - dictWord{13, 0, 445}, - dictWord{145, 0, 126}, - dictWord{ - 7, - 11, - 1076, - }, - dictWord{9, 11, 80}, - dictWord{11, 11, 78}, - dictWord{11, 11, 421}, - dictWord{11, 11, 534}, - dictWord{140, 11, 545}, - dictWord{8, 0, 966}, - dictWord{ - 10, - 0, - 
1023, - }, - dictWord{14, 11, 369}, - dictWord{146, 11, 72}, - dictWord{135, 11, 1641}, - dictWord{6, 0, 232}, - dictWord{6, 0, 412}, - dictWord{7, 0, 1074}, - dictWord{ - 8, - 0, - 9, - }, - dictWord{8, 0, 157}, - dictWord{8, 0, 786}, - dictWord{9, 0, 196}, - dictWord{9, 0, 352}, - dictWord{9, 0, 457}, - dictWord{10, 0, 337}, - dictWord{11, 0, 232}, - dictWord{ - 11, - 0, - 877, - }, - dictWord{12, 0, 480}, - dictWord{140, 0, 546}, - dictWord{135, 0, 958}, - dictWord{4, 0, 382}, - dictWord{136, 0, 579}, - dictWord{4, 0, 212}, - dictWord{ - 135, - 0, - 1206, - }, - dictWord{4, 11, 497}, - dictWord{5, 11, 657}, - dictWord{135, 11, 1584}, - dictWord{132, 0, 681}, - dictWord{8, 0, 971}, - dictWord{138, 0, 965}, - dictWord{ - 5, - 10, - 448, - }, - dictWord{136, 10, 535}, - dictWord{14, 0, 16}, - dictWord{146, 0, 44}, - dictWord{11, 0, 584}, - dictWord{11, 0, 616}, - dictWord{14, 0, 275}, - dictWord{ - 11, - 11, - 584, - }, - dictWord{11, 11, 616}, - dictWord{142, 11, 275}, - dictWord{136, 11, 13}, - dictWord{7, 10, 610}, - dictWord{135, 10, 1501}, - dictWord{7, 11, 642}, - dictWord{8, 11, 250}, - dictWord{11, 11, 123}, - dictWord{11, 11, 137}, - dictWord{13, 11, 48}, - dictWord{142, 11, 95}, - dictWord{133, 0, 655}, - dictWord{17, 0, 67}, - dictWord{147, 0, 74}, - dictWord{134, 0, 751}, - dictWord{134, 0, 1967}, - dictWord{6, 0, 231}, - dictWord{136, 0, 423}, - dictWord{5, 0, 300}, - dictWord{138, 0, 1016}, - dictWord{4, 10, 319}, - dictWord{5, 10, 699}, - dictWord{138, 10, 673}, - dictWord{6, 0, 237}, - dictWord{7, 0, 611}, - dictWord{8, 0, 100}, - dictWord{9, 0, 416}, - dictWord{ - 11, - 0, - 335, - }, - dictWord{12, 0, 173}, - dictWord{18, 0, 101}, - dictWord{6, 10, 336}, - dictWord{8, 10, 552}, - dictWord{9, 10, 285}, - dictWord{10, 10, 99}, - dictWord{ - 139, - 10, - 568, - }, - dictWord{134, 0, 1370}, - dictWord{7, 10, 1406}, - dictWord{9, 10, 218}, - dictWord{141, 10, 222}, - dictWord{133, 10, 256}, - dictWord{ - 135, - 0, - 1208, - }, - dictWord{14, 11, 213}, - dictWord{148, 11, 38}, - dictWord{6, 0, 1219}, - dictWord{135, 11, 1642}, - dictWord{13, 0, 417}, - dictWord{14, 0, 129}, - dictWord{143, 0, 15}, - dictWord{10, 11, 545}, - dictWord{140, 11, 301}, - dictWord{17, 10, 39}, - dictWord{148, 10, 36}, - dictWord{133, 0, 199}, - dictWord{4, 11, 904}, - dictWord{133, 11, 794}, - dictWord{12, 0, 427}, - dictWord{146, 0, 38}, - dictWord{134, 0, 949}, - dictWord{8, 0, 665}, - dictWord{135, 10, 634}, - dictWord{ - 132, - 10, - 618, - }, - dictWord{135, 10, 259}, - dictWord{132, 10, 339}, - dictWord{133, 11, 761}, - dictWord{141, 10, 169}, - dictWord{132, 10, 759}, - dictWord{5, 0, 688}, - dictWord{7, 0, 539}, - dictWord{135, 0, 712}, - dictWord{7, 11, 386}, - dictWord{138, 11, 713}, - dictWord{134, 0, 1186}, - dictWord{6, 11, 7}, - dictWord{6, 11, 35}, - dictWord{ - 7, - 11, - 147, - }, - dictWord{7, 11, 1069}, - dictWord{7, 11, 1568}, - dictWord{7, 11, 1575}, - dictWord{7, 11, 1917}, - dictWord{8, 11, 43}, - dictWord{8, 11, 208}, - dictWord{ - 9, - 11, - 128, - }, - dictWord{9, 11, 866}, - dictWord{10, 11, 20}, - dictWord{11, 11, 981}, - dictWord{147, 11, 33}, - dictWord{7, 11, 893}, - dictWord{8, 10, 482}, - dictWord{141, 11, 424}, - dictWord{6, 0, 312}, - dictWord{6, 0, 1715}, - dictWord{10, 0, 584}, - dictWord{11, 0, 546}, - dictWord{11, 0, 692}, - dictWord{12, 0, 259}, - dictWord{ - 12, - 0, - 295, - }, - dictWord{13, 0, 46}, - dictWord{141, 0, 154}, - dictWord{5, 10, 336}, - dictWord{6, 10, 341}, - dictWord{6, 10, 478}, - dictWord{6, 10, 1763}, - dictWord{ - 136, - 10, - 386, - }, - 
dictWord{137, 0, 151}, - dictWord{132, 0, 588}, - dictWord{152, 0, 4}, - dictWord{6, 11, 322}, - dictWord{9, 11, 552}, - dictWord{11, 11, 274}, - dictWord{ - 13, - 11, - 209, - }, - dictWord{13, 11, 499}, - dictWord{14, 11, 85}, - dictWord{15, 11, 126}, - dictWord{145, 11, 70}, - dictWord{135, 10, 73}, - dictWord{4, 0, 231}, - dictWord{ - 5, - 0, - 61, - }, - dictWord{6, 0, 104}, - dictWord{7, 0, 729}, - dictWord{7, 0, 964}, - dictWord{7, 0, 1658}, - dictWord{140, 0, 414}, - dictWord{6, 0, 263}, - dictWord{138, 0, 757}, - dictWord{135, 10, 1971}, - dictWord{4, 0, 612}, - dictWord{133, 0, 561}, - dictWord{132, 0, 320}, - dictWord{135, 10, 1344}, - dictWord{8, 11, 83}, - dictWord{ - 8, - 11, - 817, - }, - dictWord{9, 11, 28}, - dictWord{9, 11, 29}, - dictWord{9, 11, 885}, - dictWord{10, 11, 387}, - dictWord{11, 11, 633}, - dictWord{11, 11, 740}, - dictWord{ - 13, - 11, - 235, - }, - dictWord{13, 11, 254}, - dictWord{15, 11, 143}, - dictWord{143, 11, 146}, - dictWord{5, 10, 396}, - dictWord{134, 10, 501}, - dictWord{140, 11, 49}, - dictWord{132, 0, 225}, - dictWord{4, 10, 929}, - dictWord{5, 10, 799}, - dictWord{8, 10, 46}, - dictWord{136, 10, 740}, - dictWord{4, 0, 405}, - dictWord{7, 0, 817}, - dictWord{ - 14, - 0, - 58, - }, - dictWord{17, 0, 37}, - dictWord{146, 0, 124}, - dictWord{133, 0, 974}, - dictWord{4, 11, 412}, - dictWord{133, 11, 581}, - dictWord{4, 10, 892}, - dictWord{ - 133, - 10, - 770, - }, - dictWord{4, 0, 996}, - dictWord{134, 0, 2026}, - dictWord{4, 0, 527}, - dictWord{5, 0, 235}, - dictWord{7, 0, 1239}, - dictWord{11, 0, 131}, - dictWord{ - 140, - 0, - 370, - }, - dictWord{9, 0, 16}, - dictWord{13, 0, 386}, - dictWord{135, 11, 421}, - dictWord{7, 0, 956}, - dictWord{7, 0, 1157}, - dictWord{7, 0, 1506}, - dictWord{7, 0, 1606}, - dictWord{7, 0, 1615}, - dictWord{7, 0, 1619}, - dictWord{7, 0, 1736}, - dictWord{7, 0, 1775}, - dictWord{8, 0, 590}, - dictWord{9, 0, 324}, - dictWord{9, 0, 736}, - dictWord{ - 9, - 0, - 774, - }, - dictWord{9, 0, 776}, - dictWord{9, 0, 784}, - dictWord{10, 0, 567}, - dictWord{10, 0, 708}, - dictWord{11, 0, 518}, - dictWord{11, 0, 613}, - dictWord{11, 0, 695}, - dictWord{11, 0, 716}, - dictWord{11, 0, 739}, - dictWord{11, 0, 770}, - dictWord{11, 0, 771}, - dictWord{11, 0, 848}, - dictWord{11, 0, 857}, - dictWord{11, 0, 931}, - dictWord{ - 11, - 0, - 947, - }, - dictWord{12, 0, 326}, - dictWord{12, 0, 387}, - dictWord{12, 0, 484}, - dictWord{12, 0, 528}, - dictWord{12, 0, 552}, - dictWord{12, 0, 613}, - dictWord{ - 13, - 0, - 189, - }, - dictWord{13, 0, 256}, - dictWord{13, 0, 340}, - dictWord{13, 0, 432}, - dictWord{13, 0, 436}, - dictWord{13, 0, 440}, - dictWord{13, 0, 454}, - dictWord{14, 0, 174}, - dictWord{14, 0, 220}, - dictWord{14, 0, 284}, - dictWord{14, 0, 390}, - dictWord{145, 0, 121}, - dictWord{135, 10, 158}, - dictWord{9, 0, 137}, - dictWord{138, 0, 221}, - dictWord{4, 11, 110}, - dictWord{10, 11, 415}, - dictWord{10, 11, 597}, - dictWord{142, 11, 206}, - dictWord{141, 11, 496}, - dictWord{135, 11, 205}, - dictWord{ - 151, - 10, - 25, - }, - dictWord{135, 11, 778}, - dictWord{7, 11, 1656}, - dictWord{7, 10, 2001}, - dictWord{9, 11, 369}, - dictWord{10, 11, 338}, - dictWord{10, 11, 490}, - dictWord{11, 11, 154}, - dictWord{11, 11, 545}, - dictWord{11, 11, 775}, - dictWord{13, 11, 77}, - dictWord{141, 11, 274}, - dictWord{4, 11, 444}, - dictWord{ - 10, - 11, - 146, - }, - dictWord{140, 11, 9}, - dictWord{7, 0, 390}, - dictWord{138, 0, 140}, - dictWord{135, 0, 1144}, - dictWord{134, 0, 464}, - dictWord{7, 10, 1461}, - dictWord{ - 140, - 
10, - 91, - }, - dictWord{132, 10, 602}, - dictWord{4, 11, 283}, - dictWord{135, 11, 1194}, - dictWord{5, 0, 407}, - dictWord{11, 0, 204}, - dictWord{11, 0, 243}, - dictWord{ - 11, - 0, - 489, - }, - dictWord{12, 0, 293}, - dictWord{19, 0, 37}, - dictWord{20, 0, 73}, - dictWord{150, 0, 38}, - dictWord{7, 0, 1218}, - dictWord{136, 0, 303}, - dictWord{ - 5, - 0, - 325, - }, - dictWord{8, 0, 5}, - dictWord{8, 0, 227}, - dictWord{9, 0, 105}, - dictWord{10, 0, 585}, - dictWord{12, 0, 614}, - dictWord{4, 10, 13}, - dictWord{5, 10, 567}, - dictWord{ - 7, - 10, - 1498, - }, - dictWord{9, 10, 124}, - dictWord{11, 10, 521}, - dictWord{140, 10, 405}, - dictWord{135, 10, 1006}, - dictWord{7, 0, 800}, - dictWord{10, 0, 12}, - dictWord{134, 11, 1720}, - dictWord{135, 0, 1783}, - dictWord{132, 10, 735}, - dictWord{138, 10, 812}, - dictWord{4, 10, 170}, - dictWord{135, 10, 323}, - dictWord{ - 6, - 0, - 621, - }, - dictWord{13, 0, 504}, - dictWord{144, 0, 89}, - dictWord{5, 10, 304}, - dictWord{135, 10, 1403}, - dictWord{137, 11, 216}, - dictWord{6, 0, 920}, - dictWord{ - 6, - 0, - 1104, - }, - dictWord{9, 11, 183}, - dictWord{139, 11, 286}, - dictWord{4, 0, 376}, - dictWord{133, 10, 742}, - dictWord{134, 0, 218}, - dictWord{8, 0, 641}, - dictWord{ - 11, - 0, - 388, - }, - dictWord{140, 0, 580}, - dictWord{7, 0, 454}, - dictWord{7, 0, 782}, - dictWord{8, 0, 768}, - dictWord{140, 0, 686}, - dictWord{137, 11, 33}, - dictWord{ - 133, - 10, - 111, - }, - dictWord{144, 0, 0}, - dictWord{10, 0, 676}, - dictWord{140, 0, 462}, - dictWord{6, 0, 164}, - dictWord{136, 11, 735}, - dictWord{133, 10, 444}, - dictWord{ - 150, - 0, - 50, - }, - dictWord{7, 11, 1862}, - dictWord{12, 11, 491}, - dictWord{12, 11, 520}, - dictWord{13, 11, 383}, - dictWord{14, 11, 244}, - dictWord{146, 11, 12}, - dictWord{ - 5, - 11, - 132, - }, - dictWord{9, 11, 486}, - dictWord{9, 11, 715}, - dictWord{10, 11, 458}, - dictWord{11, 11, 373}, - dictWord{11, 11, 668}, - dictWord{11, 11, 795}, - dictWord{11, 11, 897}, - dictWord{12, 11, 272}, - dictWord{12, 11, 424}, - dictWord{12, 11, 539}, - dictWord{12, 11, 558}, - dictWord{14, 11, 245}, - dictWord{ - 14, - 11, - 263, - }, - dictWord{14, 11, 264}, - dictWord{14, 11, 393}, - dictWord{142, 11, 403}, - dictWord{8, 10, 123}, - dictWord{15, 10, 6}, - dictWord{144, 10, 7}, - dictWord{ - 6, - 0, - 285, - }, - dictWord{8, 0, 654}, - dictWord{11, 0, 749}, - dictWord{12, 0, 190}, - dictWord{12, 0, 327}, - dictWord{13, 0, 120}, - dictWord{13, 0, 121}, - dictWord{13, 0, 327}, - dictWord{15, 0, 47}, - dictWord{146, 0, 40}, - dictWord{5, 11, 8}, - dictWord{6, 11, 89}, - dictWord{6, 11, 400}, - dictWord{7, 11, 1569}, - dictWord{7, 11, 1623}, - dictWord{ - 7, - 11, - 1850, - }, - dictWord{8, 11, 218}, - dictWord{8, 11, 422}, - dictWord{9, 11, 570}, - dictWord{138, 11, 626}, - dictWord{6, 11, 387}, - dictWord{7, 11, 882}, - dictWord{141, 11, 111}, - dictWord{6, 0, 343}, - dictWord{7, 0, 195}, - dictWord{9, 0, 226}, - dictWord{10, 0, 197}, - dictWord{10, 0, 575}, - dictWord{11, 0, 502}, - dictWord{ - 11, - 0, - 899, - }, - dictWord{6, 11, 224}, - dictWord{7, 11, 877}, - dictWord{137, 11, 647}, - dictWord{5, 10, 937}, - dictWord{135, 10, 100}, - dictWord{135, 11, 790}, - dictWord{150, 0, 29}, - dictWord{147, 0, 8}, - dictWord{134, 0, 1812}, - dictWord{149, 0, 8}, - dictWord{135, 11, 394}, - dictWord{7, 0, 1125}, - dictWord{9, 0, 143}, - dictWord{ - 11, - 0, - 61, - }, - dictWord{14, 0, 405}, - dictWord{150, 0, 21}, - dictWord{10, 11, 755}, - dictWord{147, 11, 29}, - dictWord{9, 11, 378}, - dictWord{141, 11, 
162}, - dictWord{135, 10, 922}, - dictWord{5, 10, 619}, - dictWord{133, 10, 698}, - dictWord{134, 0, 1327}, - dictWord{6, 0, 1598}, - dictWord{137, 0, 575}, - dictWord{ - 9, - 11, - 569, - }, - dictWord{12, 11, 12}, - dictWord{12, 11, 81}, - dictWord{12, 11, 319}, - dictWord{13, 11, 69}, - dictWord{14, 11, 259}, - dictWord{16, 11, 87}, - dictWord{ - 17, - 11, - 1, - }, - dictWord{17, 11, 21}, - dictWord{17, 11, 24}, - dictWord{18, 11, 15}, - dictWord{18, 11, 56}, - dictWord{18, 11, 59}, - dictWord{18, 11, 127}, - dictWord{18, 11, 154}, - dictWord{19, 11, 19}, - dictWord{148, 11, 31}, - dictWord{6, 0, 895}, - dictWord{135, 11, 1231}, - dictWord{5, 0, 959}, - dictWord{7, 11, 124}, - dictWord{136, 11, 38}, - dictWord{5, 11, 261}, - dictWord{7, 11, 78}, - dictWord{7, 11, 199}, - dictWord{8, 11, 815}, - dictWord{9, 11, 126}, - dictWord{138, 11, 342}, - dictWord{5, 10, 917}, - dictWord{134, 10, 1659}, - dictWord{7, 0, 1759}, - dictWord{5, 11, 595}, - dictWord{135, 11, 1863}, - dictWord{136, 0, 173}, - dictWord{134, 0, 266}, - dictWord{ - 142, - 0, - 261, - }, - dictWord{132, 11, 628}, - dictWord{5, 10, 251}, - dictWord{5, 10, 956}, - dictWord{8, 10, 268}, - dictWord{9, 10, 214}, - dictWord{146, 10, 142}, - dictWord{ - 7, - 11, - 266, - }, - dictWord{136, 11, 804}, - dictWord{135, 11, 208}, - dictWord{6, 11, 79}, - dictWord{7, 11, 1021}, - dictWord{135, 11, 1519}, - dictWord{11, 11, 704}, - dictWord{141, 11, 396}, - dictWord{5, 10, 346}, - dictWord{5, 10, 711}, - dictWord{136, 10, 390}, - dictWord{136, 11, 741}, - dictWord{134, 11, 376}, - dictWord{ - 134, - 0, - 1427, - }, - dictWord{6, 0, 1033}, - dictWord{6, 0, 1217}, - dictWord{136, 0, 300}, - dictWord{133, 10, 624}, - dictWord{6, 11, 100}, - dictWord{7, 11, 244}, - dictWord{ - 7, - 11, - 632, - }, - dictWord{7, 11, 1609}, - dictWord{8, 11, 178}, - dictWord{8, 11, 638}, - dictWord{141, 11, 58}, - dictWord{6, 0, 584}, - dictWord{5, 10, 783}, - dictWord{ - 7, - 10, - 1998, - }, - dictWord{135, 10, 2047}, - dictWord{5, 0, 427}, - dictWord{5, 0, 734}, - dictWord{7, 0, 478}, - dictWord{136, 0, 52}, - dictWord{7, 0, 239}, - dictWord{ - 11, - 0, - 217, - }, - dictWord{142, 0, 165}, - dictWord{134, 0, 1129}, - dictWord{6, 0, 168}, - dictWord{6, 0, 1734}, - dictWord{7, 0, 20}, - dictWord{7, 0, 1056}, - dictWord{8, 0, 732}, - dictWord{9, 0, 406}, - dictWord{9, 0, 911}, - dictWord{138, 0, 694}, - dictWord{132, 10, 594}, - dictWord{133, 11, 791}, - dictWord{7, 11, 686}, - dictWord{8, 11, 33}, - dictWord{8, 11, 238}, - dictWord{10, 11, 616}, - dictWord{11, 11, 467}, - dictWord{11, 11, 881}, - dictWord{13, 11, 217}, - dictWord{13, 11, 253}, - dictWord{ - 142, - 11, - 268, - }, - dictWord{137, 11, 476}, - dictWord{134, 0, 418}, - dictWord{133, 0, 613}, - dictWord{132, 0, 632}, - dictWord{132, 11, 447}, - dictWord{7, 0, 32}, - dictWord{ - 7, - 0, - 984, - }, - dictWord{8, 0, 85}, - dictWord{8, 0, 709}, - dictWord{9, 0, 579}, - dictWord{9, 0, 847}, - dictWord{9, 0, 856}, - dictWord{10, 0, 799}, - dictWord{11, 0, 258}, - dictWord{ - 11, - 0, - 1007, - }, - dictWord{12, 0, 331}, - dictWord{12, 0, 615}, - dictWord{13, 0, 188}, - dictWord{13, 0, 435}, - dictWord{14, 0, 8}, - dictWord{15, 0, 165}, - dictWord{ - 16, - 0, - 27, - }, - dictWord{20, 0, 40}, - dictWord{144, 11, 35}, - dictWord{4, 11, 128}, - dictWord{5, 11, 415}, - dictWord{6, 11, 462}, - dictWord{7, 11, 294}, - dictWord{7, 11, 578}, - dictWord{10, 11, 710}, - dictWord{139, 11, 86}, - dictWord{5, 0, 694}, - dictWord{136, 0, 909}, - dictWord{7, 0, 1109}, - dictWord{11, 0, 7}, - dictWord{5, 10, 37}, - 
dictWord{ - 6, - 10, - 39, - }, - dictWord{6, 10, 451}, - dictWord{7, 10, 218}, - dictWord{7, 10, 1166}, - dictWord{7, 10, 1687}, - dictWord{8, 10, 662}, - dictWord{144, 10, 2}, - dictWord{ - 136, - 11, - 587, - }, - dictWord{6, 11, 427}, - dictWord{7, 11, 1018}, - dictWord{138, 11, 692}, - dictWord{4, 11, 195}, - dictWord{6, 10, 508}, - dictWord{135, 11, 802}, - dictWord{4, 0, 167}, - dictWord{135, 0, 82}, - dictWord{5, 0, 62}, - dictWord{6, 0, 24}, - dictWord{6, 0, 534}, - dictWord{7, 0, 74}, - dictWord{7, 0, 678}, - dictWord{7, 0, 684}, - dictWord{ - 7, - 0, - 1043, - }, - dictWord{7, 0, 1072}, - dictWord{8, 0, 280}, - dictWord{8, 0, 541}, - dictWord{8, 0, 686}, - dictWord{9, 0, 258}, - dictWord{10, 0, 519}, - dictWord{11, 0, 252}, - dictWord{140, 0, 282}, - dictWord{138, 0, 33}, - dictWord{4, 0, 359}, - dictWord{133, 11, 738}, - dictWord{7, 0, 980}, - dictWord{9, 0, 328}, - dictWord{13, 0, 186}, - dictWord{13, 0, 364}, - dictWord{7, 10, 635}, - dictWord{7, 10, 796}, - dictWord{8, 10, 331}, - dictWord{9, 10, 330}, - dictWord{9, 10, 865}, - dictWord{10, 10, 119}, - dictWord{ - 10, - 10, - 235, - }, - dictWord{11, 10, 111}, - dictWord{11, 10, 129}, - dictWord{11, 10, 240}, - dictWord{12, 10, 31}, - dictWord{12, 10, 66}, - dictWord{12, 10, 222}, - dictWord{12, 10, 269}, - dictWord{12, 10, 599}, - dictWord{12, 10, 684}, - dictWord{12, 10, 689}, - dictWord{12, 10, 691}, - dictWord{142, 10, 345}, - dictWord{ - 137, - 10, - 527, - }, - dictWord{6, 0, 596}, - dictWord{7, 0, 585}, - dictWord{135, 10, 702}, - dictWord{134, 11, 1683}, - dictWord{133, 0, 211}, - dictWord{6, 0, 145}, - dictWord{ - 141, - 0, - 336, - }, - dictWord{134, 0, 1130}, - dictWord{7, 0, 873}, - dictWord{6, 10, 37}, - dictWord{7, 10, 1666}, - dictWord{8, 10, 195}, - dictWord{8, 10, 316}, - dictWord{ - 9, - 10, - 178, - }, - dictWord{9, 10, 276}, - dictWord{9, 10, 339}, - dictWord{9, 10, 536}, - dictWord{10, 10, 102}, - dictWord{10, 10, 362}, - dictWord{10, 10, 785}, - dictWord{ - 11, - 10, - 55, - }, - dictWord{11, 10, 149}, - dictWord{11, 10, 773}, - dictWord{13, 10, 416}, - dictWord{13, 10, 419}, - dictWord{14, 10, 38}, - dictWord{14, 10, 41}, - dictWord{ - 142, - 10, - 210, - }, - dictWord{8, 0, 840}, - dictWord{136, 0, 841}, - dictWord{132, 0, 263}, - dictWord{5, 11, 3}, - dictWord{8, 11, 578}, - dictWord{9, 11, 118}, - dictWord{ - 10, - 11, - 705, - }, - dictWord{12, 11, 383}, - dictWord{141, 11, 279}, - dictWord{132, 0, 916}, - dictWord{133, 11, 229}, - dictWord{133, 10, 645}, - dictWord{15, 0, 155}, - dictWord{16, 0, 79}, - dictWord{8, 11, 102}, - dictWord{10, 11, 578}, - dictWord{10, 11, 672}, - dictWord{12, 11, 496}, - dictWord{13, 11, 408}, - dictWord{14, 11, 121}, - dictWord{145, 11, 106}, - dictWord{4, 0, 599}, - dictWord{5, 0, 592}, - dictWord{6, 0, 1634}, - dictWord{7, 0, 5}, - dictWord{7, 0, 55}, - dictWord{7, 0, 67}, - dictWord{7, 0, 97}, - dictWord{7, 0, 691}, - dictWord{7, 0, 979}, - dictWord{7, 0, 1600}, - dictWord{7, 0, 1697}, - dictWord{8, 0, 207}, - dictWord{8, 0, 214}, - dictWord{8, 0, 231}, - dictWord{8, 0, 294}, - dictWord{8, 0, 336}, - dictWord{8, 0, 428}, - dictWord{8, 0, 471}, - dictWord{8, 0, 622}, - dictWord{8, 0, 626}, - dictWord{8, 0, 679}, - dictWord{8, 0, 759}, - dictWord{8, 0, 829}, - dictWord{9, 0, 11}, - dictWord{9, 0, 246}, - dictWord{9, 0, 484}, - dictWord{9, 0, 573}, - dictWord{9, 0, 706}, - dictWord{9, 0, 762}, - dictWord{9, 0, 798}, - dictWord{9, 0, 855}, - dictWord{9, 0, 870}, - dictWord{9, 0, 912}, - dictWord{10, 0, 303}, - dictWord{10, 0, 335}, - dictWord{10, 0, 424}, - 
dictWord{10, 0, 461}, - dictWord{10, 0, 543}, - dictWord{ - 10, - 0, - 759, - }, - dictWord{10, 0, 814}, - dictWord{11, 0, 59}, - dictWord{11, 0, 199}, - dictWord{11, 0, 235}, - dictWord{11, 0, 590}, - dictWord{11, 0, 631}, - dictWord{11, 0, 929}, - dictWord{11, 0, 963}, - dictWord{11, 0, 987}, - dictWord{12, 0, 114}, - dictWord{12, 0, 182}, - dictWord{12, 0, 226}, - dictWord{12, 0, 332}, - dictWord{12, 0, 439}, - dictWord{12, 0, 575}, - dictWord{12, 0, 598}, - dictWord{12, 0, 675}, - dictWord{13, 0, 8}, - dictWord{13, 0, 125}, - dictWord{13, 0, 194}, - dictWord{13, 0, 287}, - dictWord{ - 14, - 0, - 197, - }, - dictWord{14, 0, 383}, - dictWord{15, 0, 53}, - dictWord{17, 0, 63}, - dictWord{19, 0, 46}, - dictWord{19, 0, 98}, - dictWord{19, 0, 106}, - dictWord{148, 0, 85}, - dictWord{ - 7, - 0, - 1356, - }, - dictWord{132, 10, 290}, - dictWord{6, 10, 70}, - dictWord{7, 10, 1292}, - dictWord{10, 10, 762}, - dictWord{139, 10, 288}, - dictWord{150, 11, 55}, - dictWord{4, 0, 593}, - dictWord{8, 11, 115}, - dictWord{8, 11, 350}, - dictWord{9, 11, 489}, - dictWord{10, 11, 128}, - dictWord{11, 11, 306}, - dictWord{12, 11, 373}, - dictWord{14, 11, 30}, - dictWord{17, 11, 79}, - dictWord{147, 11, 80}, - dictWord{135, 11, 1235}, - dictWord{134, 0, 1392}, - dictWord{4, 11, 230}, - dictWord{ - 133, - 11, - 702, - }, - dictWord{147, 0, 126}, - dictWord{7, 10, 131}, - dictWord{7, 10, 422}, - dictWord{8, 10, 210}, - dictWord{140, 10, 573}, - dictWord{134, 0, 1179}, - dictWord{ - 139, - 11, - 435, - }, - dictWord{139, 10, 797}, - dictWord{134, 11, 1728}, - dictWord{4, 0, 162}, - dictWord{18, 11, 26}, - dictWord{19, 11, 42}, - dictWord{20, 11, 43}, - dictWord{21, 11, 0}, - dictWord{23, 11, 27}, - dictWord{152, 11, 14}, - dictWord{132, 10, 936}, - dictWord{6, 0, 765}, - dictWord{5, 10, 453}, - dictWord{134, 10, 441}, - dictWord{133, 0, 187}, - dictWord{135, 0, 1286}, - dictWord{6, 0, 635}, - dictWord{6, 0, 904}, - dictWord{6, 0, 1210}, - dictWord{134, 0, 1489}, - dictWord{4, 0, 215}, - dictWord{ - 8, - 0, - 890, - }, - dictWord{9, 0, 38}, - dictWord{10, 0, 923}, - dictWord{11, 0, 23}, - dictWord{11, 0, 127}, - dictWord{139, 0, 796}, - dictWord{6, 0, 1165}, - dictWord{ - 134, - 0, - 1306, - }, - dictWord{7, 0, 716}, - dictWord{13, 0, 97}, - dictWord{141, 0, 251}, - dictWord{132, 10, 653}, - dictWord{136, 0, 657}, - dictWord{146, 10, 80}, - dictWord{ - 5, - 11, - 622, - }, - dictWord{7, 11, 1032}, - dictWord{11, 11, 26}, - dictWord{11, 11, 213}, - dictWord{11, 11, 707}, - dictWord{12, 11, 380}, - dictWord{13, 11, 226}, - dictWord{141, 11, 355}, - dictWord{6, 0, 299}, - dictWord{5, 11, 70}, - dictWord{6, 11, 334}, - dictWord{9, 11, 171}, - dictWord{11, 11, 637}, - dictWord{12, 11, 202}, - dictWord{14, 11, 222}, - dictWord{145, 11, 42}, - dictWord{142, 0, 134}, - dictWord{4, 11, 23}, - dictWord{5, 11, 313}, - dictWord{5, 11, 1014}, - dictWord{6, 11, 50}, - dictWord{ - 6, - 11, - 51, - }, - dictWord{7, 11, 142}, - dictWord{7, 11, 384}, - dictWord{9, 11, 783}, - dictWord{139, 11, 741}, - dictWord{4, 11, 141}, - dictWord{7, 11, 559}, - dictWord{ - 8, - 11, - 640, - }, - dictWord{9, 11, 460}, - dictWord{12, 11, 183}, - dictWord{141, 11, 488}, - dictWord{136, 11, 614}, - dictWord{7, 10, 1368}, - dictWord{8, 10, 232}, - dictWord{8, 10, 361}, - dictWord{10, 10, 682}, - dictWord{138, 10, 742}, - dictWord{137, 10, 534}, - dictWord{6, 0, 1082}, - dictWord{140, 0, 658}, - dictWord{ - 137, - 10, - 27, - }, - dictWord{135, 0, 2002}, - dictWord{142, 10, 12}, - dictWord{4, 0, 28}, - dictWord{5, 0, 440}, - dictWord{7, 0, 248}, - 
[... omitted: a long, extraction-garbled run of deleted `dictWord{...}` entries from a generated static-dictionary lookup table in a vendored dependency; the diff removes these machine-generated table rows verbatim and they carry no further reviewable content ...]
- 0, - 2024, - }, - dictWord{4, 0, 611}, - dictWord{133, 0, 606}, - dictWord{9, 10, 174}, - dictWord{10, 10, 164}, - dictWord{11, 10, 440}, - dictWord{11, 10, 841}, - dictWord{ - 143, - 10, - 98, - }, - dictWord{134, 10, 426}, - dictWord{10, 10, 608}, - dictWord{139, 10, 1002}, - dictWord{138, 10, 250}, - dictWord{6, 0, 25}, - dictWord{7, 0, 855}, - dictWord{7, 0, 1258}, - dictWord{144, 0, 32}, - dictWord{7, 11, 1725}, - dictWord{138, 11, 393}, - dictWord{5, 11, 263}, - dictWord{134, 11, 414}, - dictWord{6, 0, 2011}, - dictWord{133, 10, 476}, - dictWord{4, 0, 4}, - dictWord{7, 0, 1118}, - dictWord{7, 0, 1320}, - dictWord{7, 0, 1706}, - dictWord{8, 0, 277}, - dictWord{9, 0, 622}, - dictWord{ - 10, - 0, - 9, - }, - dictWord{11, 0, 724}, - dictWord{12, 0, 350}, - dictWord{12, 0, 397}, - dictWord{13, 0, 28}, - dictWord{13, 0, 159}, - dictWord{15, 0, 89}, - dictWord{18, 0, 5}, - dictWord{ - 19, - 0, - 9, - }, - dictWord{20, 0, 34}, - dictWord{22, 0, 47}, - dictWord{6, 11, 178}, - dictWord{6, 11, 1750}, - dictWord{8, 11, 251}, - dictWord{9, 11, 690}, - dictWord{ - 10, - 11, - 155, - }, - dictWord{10, 11, 196}, - dictWord{10, 11, 373}, - dictWord{11, 11, 698}, - dictWord{13, 11, 155}, - dictWord{148, 11, 93}, - dictWord{5, 11, 97}, - dictWord{ - 137, - 11, - 393, - }, - dictWord{7, 0, 764}, - dictWord{11, 0, 461}, - dictWord{12, 0, 172}, - dictWord{5, 10, 76}, - dictWord{6, 10, 458}, - dictWord{6, 10, 497}, - dictWord{ - 7, - 10, - 868, - }, - dictWord{9, 10, 658}, - dictWord{10, 10, 594}, - dictWord{11, 10, 566}, - dictWord{12, 10, 338}, - dictWord{141, 10, 200}, - dictWord{134, 0, 1449}, - dictWord{138, 11, 40}, - dictWord{134, 11, 1639}, - dictWord{134, 0, 1445}, - dictWord{6, 0, 1168}, - dictWord{4, 10, 526}, - dictWord{7, 10, 1029}, - dictWord{ - 135, - 10, - 1054, - }, - dictWord{4, 11, 191}, - dictWord{7, 11, 934}, - dictWord{8, 11, 647}, - dictWord{145, 11, 97}, - dictWord{132, 10, 636}, - dictWord{6, 0, 233}, - dictWord{ - 7, - 10, - 660, - }, - dictWord{7, 10, 1124}, - dictWord{17, 10, 31}, - dictWord{19, 10, 22}, - dictWord{151, 10, 14}, - dictWord{6, 10, 1699}, - dictWord{136, 11, 110}, - dictWord{ - 12, - 11, - 246, - }, - dictWord{15, 11, 162}, - dictWord{19, 11, 64}, - dictWord{20, 11, 8}, - dictWord{20, 11, 95}, - dictWord{22, 11, 24}, - dictWord{152, 11, 17}, - dictWord{ - 5, - 11, - 165, - }, - dictWord{9, 11, 346}, - dictWord{138, 11, 655}, - dictWord{5, 11, 319}, - dictWord{135, 11, 534}, - dictWord{134, 0, 255}, - dictWord{9, 0, 216}, - dictWord{ - 8, - 11, - 128, - }, - dictWord{139, 11, 179}, - dictWord{9, 0, 183}, - dictWord{139, 0, 286}, - dictWord{11, 0, 956}, - dictWord{151, 0, 3}, - dictWord{4, 0, 536}, - dictWord{ - 7, - 0, - 1141, - }, - dictWord{10, 0, 723}, - dictWord{139, 0, 371}, - dictWord{4, 10, 279}, - dictWord{7, 10, 301}, - dictWord{137, 10, 362}, - dictWord{7, 0, 285}, - dictWord{ - 5, - 11, - 57, - }, - dictWord{6, 11, 101}, - dictWord{6, 11, 1663}, - dictWord{7, 11, 132}, - dictWord{7, 11, 1048}, - dictWord{7, 11, 1154}, - dictWord{7, 11, 1415}, - dictWord{ - 7, - 11, - 1507, - }, - dictWord{12, 11, 493}, - dictWord{15, 11, 105}, - dictWord{151, 11, 15}, - dictWord{5, 11, 459}, - dictWord{7, 11, 1073}, - dictWord{7, 10, 1743}, - dictWord{ - 8, - 11, - 241, - }, - dictWord{136, 11, 334}, - dictWord{4, 10, 178}, - dictWord{133, 10, 399}, - dictWord{135, 0, 560}, - dictWord{132, 0, 690}, - dictWord{135, 0, 1246}, - dictWord{18, 0, 157}, - dictWord{147, 0, 63}, - dictWord{10, 0, 599}, - dictWord{11, 0, 33}, - dictWord{12, 0, 571}, - dictWord{149, 0, 1}, - 
dictWord{6, 11, 324}, - dictWord{ - 6, - 11, - 520, - }, - dictWord{7, 11, 338}, - dictWord{7, 11, 1616}, - dictWord{7, 11, 1729}, - dictWord{8, 11, 228}, - dictWord{9, 11, 69}, - dictWord{139, 11, 750}, - dictWord{ - 7, - 0, - 1862, - }, - dictWord{12, 0, 491}, - dictWord{12, 0, 520}, - dictWord{13, 0, 383}, - dictWord{142, 0, 244}, - dictWord{135, 11, 734}, - dictWord{134, 10, 1692}, - dictWord{10, 0, 448}, - dictWord{11, 0, 630}, - dictWord{17, 0, 117}, - dictWord{6, 10, 202}, - dictWord{7, 11, 705}, - dictWord{12, 10, 360}, - dictWord{17, 10, 118}, - dictWord{18, 10, 27}, - dictWord{148, 10, 67}, - dictWord{4, 11, 73}, - dictWord{6, 11, 612}, - dictWord{7, 11, 927}, - dictWord{7, 11, 1822}, - dictWord{8, 11, 217}, - dictWord{ - 9, - 11, - 472, - }, - dictWord{9, 11, 765}, - dictWord{9, 11, 766}, - dictWord{10, 11, 408}, - dictWord{11, 11, 51}, - dictWord{11, 11, 793}, - dictWord{12, 11, 266}, - dictWord{ - 15, - 11, - 158, - }, - dictWord{20, 11, 89}, - dictWord{150, 11, 32}, - dictWord{4, 0, 190}, - dictWord{133, 0, 554}, - dictWord{133, 0, 1001}, - dictWord{5, 11, 389}, - dictWord{ - 8, - 11, - 636, - }, - dictWord{137, 11, 229}, - dictWord{5, 0, 446}, - dictWord{7, 10, 872}, - dictWord{10, 10, 516}, - dictWord{139, 10, 167}, - dictWord{137, 10, 313}, - dictWord{132, 10, 224}, - dictWord{134, 0, 1313}, - dictWord{5, 10, 546}, - dictWord{7, 10, 35}, - dictWord{8, 10, 11}, - dictWord{8, 10, 12}, - dictWord{9, 10, 315}, - dictWord{9, 10, 533}, - dictWord{10, 10, 802}, - dictWord{11, 10, 166}, - dictWord{12, 10, 525}, - dictWord{142, 10, 243}, - dictWord{6, 0, 636}, - dictWord{137, 0, 837}, - dictWord{5, 10, 241}, - dictWord{8, 10, 242}, - dictWord{9, 10, 451}, - dictWord{10, 10, 667}, - dictWord{11, 10, 598}, - dictWord{140, 10, 429}, - dictWord{22, 10, 46}, - dictWord{150, 11, 46}, - dictWord{136, 11, 472}, - dictWord{11, 0, 278}, - dictWord{142, 0, 73}, - dictWord{141, 11, 185}, - dictWord{132, 0, 868}, - dictWord{ - 134, - 0, - 972, - }, - dictWord{4, 10, 366}, - dictWord{137, 10, 516}, - dictWord{138, 0, 1010}, - dictWord{5, 11, 189}, - dictWord{6, 10, 1736}, - dictWord{7, 11, 442}, - dictWord{ - 7, - 11, - 443, - }, - dictWord{8, 11, 281}, - dictWord{12, 11, 174}, - dictWord{13, 11, 83}, - dictWord{141, 11, 261}, - dictWord{139, 11, 384}, - dictWord{6, 11, 2}, - dictWord{ - 7, - 11, - 191, - }, - dictWord{7, 11, 446}, - dictWord{7, 11, 758}, - dictWord{7, 11, 1262}, - dictWord{7, 11, 1737}, - dictWord{8, 11, 22}, - dictWord{8, 11, 270}, - dictWord{ - 8, - 11, - 612, - }, - dictWord{9, 11, 4}, - dictWord{9, 11, 167}, - dictWord{9, 11, 312}, - dictWord{9, 11, 436}, - dictWord{10, 11, 156}, - dictWord{10, 11, 216}, - dictWord{ - 10, - 11, - 311, - }, - dictWord{10, 11, 623}, - dictWord{11, 11, 72}, - dictWord{11, 11, 330}, - dictWord{11, 11, 455}, - dictWord{12, 11, 101}, - dictWord{12, 11, 321}, - dictWord{ - 12, - 11, - 504, - }, - dictWord{12, 11, 530}, - dictWord{12, 11, 543}, - dictWord{13, 11, 17}, - dictWord{13, 11, 156}, - dictWord{13, 11, 334}, - dictWord{14, 11, 48}, - dictWord{15, 11, 70}, - dictWord{17, 11, 60}, - dictWord{148, 11, 64}, - dictWord{6, 10, 331}, - dictWord{136, 10, 623}, - dictWord{135, 0, 1231}, - dictWord{132, 0, 304}, - dictWord{6, 11, 60}, - dictWord{7, 11, 670}, - dictWord{7, 11, 1327}, - dictWord{8, 11, 411}, - dictWord{8, 11, 435}, - dictWord{9, 11, 653}, - dictWord{9, 11, 740}, - dictWord{10, 11, 385}, - dictWord{11, 11, 222}, - dictWord{11, 11, 324}, - dictWord{11, 11, 829}, - dictWord{140, 11, 611}, - dictWord{7, 0, 506}, - dictWord{6, 11, 166}, - 
dictWord{7, 11, 374}, - dictWord{135, 11, 1174}, - dictWord{14, 11, 43}, - dictWord{146, 11, 21}, - dictWord{135, 11, 1694}, - dictWord{135, 10, 1888}, - dictWord{ - 5, - 11, - 206, - }, - dictWord{134, 11, 398}, - dictWord{135, 11, 50}, - dictWord{150, 0, 26}, - dictWord{6, 0, 53}, - dictWord{6, 0, 199}, - dictWord{7, 0, 1408}, - dictWord{ - 8, - 0, - 32, - }, - dictWord{8, 0, 93}, - dictWord{10, 0, 397}, - dictWord{10, 0, 629}, - dictWord{11, 0, 593}, - dictWord{11, 0, 763}, - dictWord{13, 0, 326}, - dictWord{145, 0, 35}, - dictWord{134, 0, 105}, - dictWord{132, 10, 394}, - dictWord{4, 0, 843}, - dictWord{138, 0, 794}, - dictWord{11, 0, 704}, - dictWord{141, 0, 396}, - dictWord{5, 0, 114}, - dictWord{5, 0, 255}, - dictWord{141, 0, 285}, - dictWord{6, 0, 619}, - dictWord{7, 0, 898}, - dictWord{7, 0, 1092}, - dictWord{8, 0, 485}, - dictWord{18, 0, 28}, - dictWord{ - 19, - 0, - 116, - }, - dictWord{135, 10, 1931}, - dictWord{9, 0, 145}, - dictWord{7, 10, 574}, - dictWord{135, 10, 1719}, - dictWord{7, 0, 2035}, - dictWord{8, 0, 19}, - dictWord{ - 9, - 0, - 89, - }, - dictWord{138, 0, 831}, - dictWord{132, 10, 658}, - dictWord{6, 11, 517}, - dictWord{7, 11, 1159}, - dictWord{10, 11, 621}, - dictWord{139, 11, 192}, - dictWord{ - 7, - 0, - 1933, - }, - dictWord{7, 11, 1933}, - dictWord{9, 10, 781}, - dictWord{10, 10, 144}, - dictWord{11, 10, 385}, - dictWord{13, 10, 161}, - dictWord{13, 10, 228}, - dictWord{13, 10, 268}, - dictWord{148, 10, 107}, - dictWord{136, 10, 374}, - dictWord{10, 11, 223}, - dictWord{139, 11, 645}, - dictWord{135, 0, 1728}, - dictWord{ - 7, - 11, - 64, - }, - dictWord{7, 11, 289}, - dictWord{136, 11, 245}, - dictWord{4, 10, 344}, - dictWord{6, 10, 498}, - dictWord{139, 10, 323}, - dictWord{136, 0, 746}, - dictWord{ - 135, - 10, - 1063, - }, - dictWord{137, 10, 155}, - dictWord{4, 0, 987}, - dictWord{6, 0, 1964}, - dictWord{6, 0, 1974}, - dictWord{6, 0, 1990}, - dictWord{136, 0, 995}, - dictWord{133, 11, 609}, - dictWord{133, 10, 906}, - dictWord{134, 0, 1550}, - dictWord{134, 0, 874}, - dictWord{5, 11, 129}, - dictWord{6, 11, 61}, - dictWord{ - 135, - 11, - 947, - }, - dictWord{4, 0, 1018}, - dictWord{6, 0, 1938}, - dictWord{6, 0, 2021}, - dictWord{134, 0, 2039}, - dictWord{132, 0, 814}, - dictWord{11, 0, 126}, - dictWord{ - 139, - 0, - 287, - }, - dictWord{134, 0, 1264}, - dictWord{5, 0, 955}, - dictWord{136, 0, 814}, - dictWord{141, 11, 506}, - dictWord{132, 11, 314}, - dictWord{6, 0, 981}, - dictWord{139, 11, 1000}, - dictWord{5, 0, 56}, - dictWord{8, 0, 892}, - dictWord{8, 0, 915}, - dictWord{140, 0, 776}, - dictWord{148, 0, 100}, - dictWord{10, 0, 4}, - dictWord{ - 10, - 0, - 13, - }, - dictWord{11, 0, 638}, - dictWord{148, 0, 57}, - dictWord{148, 11, 74}, - dictWord{5, 0, 738}, - dictWord{132, 10, 616}, - dictWord{133, 11, 637}, - dictWord{ - 136, - 10, - 692, - }, - dictWord{133, 0, 758}, - dictWord{132, 10, 305}, - dictWord{137, 11, 590}, - dictWord{5, 11, 280}, - dictWord{135, 11, 1226}, - dictWord{ - 134, - 11, - 494, - }, - dictWord{135, 0, 1112}, - dictWord{133, 11, 281}, - dictWord{13, 0, 44}, - dictWord{14, 0, 214}, - dictWord{5, 10, 214}, - dictWord{7, 10, 603}, - dictWord{ - 8, - 10, - 611, - }, - dictWord{9, 10, 686}, - dictWord{10, 10, 88}, - dictWord{11, 10, 459}, - dictWord{11, 10, 496}, - dictWord{12, 10, 463}, - dictWord{140, 10, 590}, - dictWord{ - 139, - 0, - 328, - }, - dictWord{135, 11, 1064}, - dictWord{137, 0, 133}, - dictWord{7, 0, 168}, - dictWord{13, 0, 196}, - dictWord{141, 0, 237}, - dictWord{134, 10, 1703}, - dictWord{134, 0, 1152}, - 
dictWord{135, 0, 1245}, - dictWord{5, 0, 110}, - dictWord{6, 0, 169}, - dictWord{6, 0, 1702}, - dictWord{7, 0, 400}, - dictWord{8, 0, 538}, - dictWord{ - 9, - 0, - 184, - }, - dictWord{9, 0, 524}, - dictWord{140, 0, 218}, - dictWord{6, 0, 1816}, - dictWord{10, 0, 871}, - dictWord{12, 0, 769}, - dictWord{140, 0, 785}, - dictWord{ - 132, - 11, - 630, - }, - dictWord{7, 11, 33}, - dictWord{7, 11, 120}, - dictWord{8, 11, 489}, - dictWord{9, 11, 319}, - dictWord{10, 11, 820}, - dictWord{11, 11, 1004}, - dictWord{ - 12, - 11, - 379, - }, - dictWord{13, 11, 117}, - dictWord{13, 11, 412}, - dictWord{14, 11, 25}, - dictWord{15, 11, 52}, - dictWord{15, 11, 161}, - dictWord{16, 11, 47}, - dictWord{149, 11, 2}, - dictWord{6, 0, 133}, - dictWord{8, 0, 413}, - dictWord{9, 0, 353}, - dictWord{139, 0, 993}, - dictWord{145, 10, 19}, - dictWord{4, 11, 937}, - dictWord{ - 133, - 11, - 801, - }, - dictWord{134, 0, 978}, - dictWord{6, 0, 93}, - dictWord{6, 0, 1508}, - dictWord{7, 0, 1422}, - dictWord{7, 0, 1851}, - dictWord{8, 0, 673}, - dictWord{9, 0, 529}, - dictWord{140, 0, 43}, - dictWord{6, 0, 317}, - dictWord{10, 0, 512}, - dictWord{4, 10, 737}, - dictWord{11, 10, 294}, - dictWord{12, 10, 60}, - dictWord{12, 10, 437}, - dictWord{13, 10, 64}, - dictWord{13, 10, 380}, - dictWord{142, 10, 430}, - dictWord{9, 0, 371}, - dictWord{7, 11, 1591}, - dictWord{144, 11, 43}, - dictWord{6, 10, 1758}, - dictWord{8, 10, 520}, - dictWord{9, 10, 345}, - dictWord{9, 10, 403}, - dictWord{142, 10, 350}, - dictWord{5, 0, 526}, - dictWord{10, 10, 242}, - dictWord{ - 138, - 10, - 579, - }, - dictWord{9, 0, 25}, - dictWord{10, 0, 467}, - dictWord{138, 0, 559}, - dictWord{5, 10, 139}, - dictWord{7, 10, 1168}, - dictWord{138, 10, 539}, - dictWord{ - 4, - 0, - 335, - }, - dictWord{135, 0, 942}, - dictWord{140, 0, 754}, - dictWord{132, 11, 365}, - dictWord{11, 0, 182}, - dictWord{142, 0, 195}, - dictWord{142, 11, 29}, - dictWord{ - 5, - 11, - 7, - }, - dictWord{139, 11, 774}, - dictWord{4, 11, 746}, - dictWord{135, 11, 1090}, - dictWord{8, 0, 39}, - dictWord{10, 0, 773}, - dictWord{11, 0, 84}, - dictWord{ - 12, - 0, - 205, - }, - dictWord{142, 0, 1}, - dictWord{5, 0, 601}, - dictWord{5, 0, 870}, - dictWord{5, 11, 360}, - dictWord{136, 11, 237}, - dictWord{132, 0, 181}, - dictWord{ - 136, - 0, - 370, - }, - dictWord{134, 0, 1652}, - dictWord{8, 0, 358}, - dictWord{4, 10, 107}, - dictWord{7, 10, 613}, - dictWord{8, 10, 439}, - dictWord{8, 10, 504}, - dictWord{ - 9, - 10, - 501, - }, - dictWord{10, 10, 383}, - dictWord{139, 10, 477}, - dictWord{132, 10, 229}, - dictWord{137, 11, 785}, - dictWord{4, 0, 97}, - dictWord{5, 0, 147}, - dictWord{ - 6, - 0, - 286, - }, - dictWord{7, 0, 1362}, - dictWord{141, 0, 176}, - dictWord{6, 0, 537}, - dictWord{7, 0, 788}, - dictWord{7, 0, 1816}, - dictWord{132, 10, 903}, - dictWord{ - 140, - 10, - 71, - }, - dictWord{6, 0, 743}, - dictWord{134, 0, 1223}, - dictWord{6, 0, 375}, - dictWord{7, 0, 169}, - dictWord{7, 0, 254}, - dictWord{8, 0, 780}, - dictWord{135, 11, 1493}, - dictWord{7, 0, 1714}, - dictWord{4, 10, 47}, - dictWord{6, 10, 373}, - dictWord{7, 10, 452}, - dictWord{7, 10, 543}, - dictWord{7, 10, 1856}, - dictWord{9, 10, 6}, - dictWord{ - 11, - 10, - 257, - }, - dictWord{139, 10, 391}, - dictWord{6, 0, 896}, - dictWord{136, 0, 1003}, - dictWord{135, 0, 1447}, - dictWord{137, 11, 341}, - dictWord{5, 10, 980}, - dictWord{134, 10, 1754}, - dictWord{145, 11, 22}, - dictWord{4, 11, 277}, - dictWord{5, 11, 608}, - dictWord{6, 11, 493}, - dictWord{7, 11, 457}, - dictWord{ - 140, - 11, - 384, - }, - 
dictWord{7, 10, 536}, - dictWord{7, 10, 1331}, - dictWord{136, 10, 143}, - dictWord{140, 0, 744}, - dictWord{7, 11, 27}, - dictWord{135, 11, 316}, - dictWord{ - 18, - 0, - 126, - }, - dictWord{5, 10, 19}, - dictWord{134, 10, 533}, - dictWord{4, 0, 788}, - dictWord{11, 0, 41}, - dictWord{5, 11, 552}, - dictWord{5, 11, 586}, - dictWord{ - 5, - 11, - 676, - }, - dictWord{6, 11, 448}, - dictWord{8, 11, 244}, - dictWord{11, 11, 1}, - dictWord{11, 11, 41}, - dictWord{13, 11, 3}, - dictWord{16, 11, 54}, - dictWord{17, 11, 4}, - dictWord{146, 11, 13}, - dictWord{4, 0, 985}, - dictWord{6, 0, 1801}, - dictWord{4, 11, 401}, - dictWord{137, 11, 264}, - dictWord{5, 10, 395}, - dictWord{5, 10, 951}, - dictWord{134, 10, 1776}, - dictWord{5, 0, 629}, - dictWord{135, 0, 1549}, - dictWord{11, 10, 663}, - dictWord{12, 10, 210}, - dictWord{13, 10, 166}, - dictWord{ - 13, - 10, - 310, - }, - dictWord{14, 10, 373}, - dictWord{147, 10, 43}, - dictWord{9, 11, 543}, - dictWord{10, 11, 524}, - dictWord{11, 11, 30}, - dictWord{12, 11, 524}, - dictWord{ - 14, - 11, - 315, - }, - dictWord{16, 11, 18}, - dictWord{20, 11, 26}, - dictWord{148, 11, 65}, - dictWord{4, 11, 205}, - dictWord{5, 11, 623}, - dictWord{7, 11, 104}, - dictWord{ - 136, - 11, - 519, - }, - dictWord{5, 0, 293}, - dictWord{134, 0, 601}, - dictWord{7, 11, 579}, - dictWord{9, 11, 41}, - dictWord{9, 11, 244}, - dictWord{9, 11, 669}, - dictWord{ - 10, - 11, - 5, - }, - dictWord{11, 11, 861}, - dictWord{11, 11, 951}, - dictWord{139, 11, 980}, - dictWord{132, 11, 717}, - dictWord{132, 10, 695}, - dictWord{7, 10, 497}, - dictWord{ - 9, - 10, - 387, - }, - dictWord{147, 10, 81}, - dictWord{132, 0, 420}, - dictWord{142, 0, 37}, - dictWord{6, 0, 1134}, - dictWord{6, 0, 1900}, - dictWord{12, 0, 830}, - dictWord{ - 12, - 0, - 878, - }, - dictWord{12, 0, 894}, - dictWord{15, 0, 221}, - dictWord{143, 0, 245}, - dictWord{132, 11, 489}, - dictWord{7, 0, 1570}, - dictWord{140, 0, 542}, - dictWord{ - 8, - 0, - 933, - }, - dictWord{136, 0, 957}, - dictWord{6, 0, 1371}, - dictWord{7, 0, 31}, - dictWord{8, 0, 373}, - dictWord{5, 10, 284}, - dictWord{6, 10, 49}, - dictWord{6, 10, 350}, - dictWord{7, 10, 377}, - dictWord{7, 10, 1693}, - dictWord{8, 10, 678}, - dictWord{9, 10, 161}, - dictWord{9, 10, 585}, - dictWord{9, 10, 671}, - dictWord{9, 10, 839}, - dictWord{11, 10, 912}, - dictWord{141, 10, 427}, - dictWord{135, 11, 892}, - dictWord{4, 0, 325}, - dictWord{138, 0, 125}, - dictWord{139, 11, 47}, - dictWord{ - 132, - 10, - 597, - }, - dictWord{138, 0, 323}, - dictWord{6, 0, 1547}, - dictWord{7, 11, 1605}, - dictWord{9, 11, 473}, - dictWord{11, 11, 962}, - dictWord{146, 11, 139}, - dictWord{ - 139, - 10, - 908, - }, - dictWord{7, 11, 819}, - dictWord{9, 11, 26}, - dictWord{9, 11, 392}, - dictWord{10, 11, 152}, - dictWord{10, 11, 226}, - dictWord{11, 11, 19}, - dictWord{ - 12, - 11, - 276, - }, - dictWord{12, 11, 426}, - dictWord{12, 11, 589}, - dictWord{13, 11, 460}, - dictWord{15, 11, 97}, - dictWord{19, 11, 48}, - dictWord{148, 11, 104}, - dictWord{135, 11, 51}, - dictWord{4, 0, 718}, - dictWord{135, 0, 1216}, - dictWord{6, 0, 1896}, - dictWord{6, 0, 1905}, - dictWord{6, 0, 1912}, - dictWord{9, 0, 947}, - dictWord{ - 9, - 0, - 974, - }, - dictWord{12, 0, 809}, - dictWord{12, 0, 850}, - dictWord{12, 0, 858}, - dictWord{12, 0, 874}, - dictWord{12, 0, 887}, - dictWord{12, 0, 904}, - dictWord{ - 12, - 0, - 929, - }, - dictWord{12, 0, 948}, - dictWord{12, 0, 952}, - dictWord{15, 0, 198}, - dictWord{15, 0, 206}, - dictWord{15, 0, 220}, - dictWord{15, 0, 227}, - dictWord{15, 
0, 247}, - dictWord{18, 0, 188}, - dictWord{21, 0, 48}, - dictWord{21, 0, 50}, - dictWord{24, 0, 25}, - dictWord{24, 0, 29}, - dictWord{7, 11, 761}, - dictWord{7, 11, 1051}, - dictWord{ - 137, - 11, - 545, - }, - dictWord{5, 0, 124}, - dictWord{5, 0, 144}, - dictWord{6, 0, 548}, - dictWord{7, 0, 15}, - dictWord{7, 0, 153}, - dictWord{137, 0, 629}, - dictWord{ - 135, - 11, - 606, - }, - dictWord{135, 10, 2014}, - dictWord{7, 10, 2007}, - dictWord{9, 11, 46}, - dictWord{9, 10, 101}, - dictWord{9, 10, 450}, - dictWord{10, 10, 66}, - dictWord{ - 10, - 10, - 842, - }, - dictWord{11, 10, 536}, - dictWord{140, 10, 587}, - dictWord{6, 0, 75}, - dictWord{7, 0, 1531}, - dictWord{8, 0, 416}, - dictWord{9, 0, 240}, - dictWord{9, 0, 275}, - dictWord{10, 0, 100}, - dictWord{11, 0, 658}, - dictWord{11, 0, 979}, - dictWord{12, 0, 86}, - dictWord{14, 0, 207}, - dictWord{15, 0, 20}, - dictWord{143, 0, 25}, - dictWord{ - 5, - 0, - 141, - }, - dictWord{5, 0, 915}, - dictWord{6, 0, 1783}, - dictWord{7, 0, 211}, - dictWord{7, 0, 698}, - dictWord{7, 0, 1353}, - dictWord{9, 0, 83}, - dictWord{9, 0, 281}, - dictWord{ - 10, - 0, - 376, - }, - dictWord{10, 0, 431}, - dictWord{11, 0, 543}, - dictWord{12, 0, 664}, - dictWord{13, 0, 280}, - dictWord{13, 0, 428}, - dictWord{14, 0, 61}, - dictWord{ - 14, - 0, - 128, - }, - dictWord{17, 0, 52}, - dictWord{145, 0, 81}, - dictWord{132, 11, 674}, - dictWord{135, 0, 533}, - dictWord{149, 0, 6}, - dictWord{132, 11, 770}, - dictWord{ - 133, - 0, - 538, - }, - dictWord{5, 11, 79}, - dictWord{7, 11, 1027}, - dictWord{7, 11, 1477}, - dictWord{139, 11, 52}, - dictWord{139, 10, 62}, - dictWord{4, 0, 338}, - dictWord{ - 133, - 0, - 400, - }, - dictWord{5, 11, 789}, - dictWord{134, 11, 195}, - dictWord{4, 11, 251}, - dictWord{4, 11, 688}, - dictWord{7, 11, 513}, - dictWord{7, 11, 1284}, - dictWord{ - 9, - 11, - 87, - }, - dictWord{138, 11, 365}, - dictWord{134, 10, 1766}, - dictWord{6, 0, 0}, - dictWord{7, 0, 84}, - dictWord{11, 0, 895}, - dictWord{145, 0, 11}, - dictWord{ - 139, - 0, - 892, - }, - dictWord{4, 0, 221}, - dictWord{5, 0, 659}, - dictWord{7, 0, 697}, - dictWord{7, 0, 1211}, - dictWord{138, 0, 284}, - dictWord{133, 0, 989}, - dictWord{ - 133, - 11, - 889, - }, - dictWord{4, 11, 160}, - dictWord{5, 11, 330}, - dictWord{7, 11, 1434}, - dictWord{136, 11, 174}, - dictWord{6, 10, 1665}, - dictWord{7, 10, 256}, - dictWord{ - 7, - 10, - 1388, - }, - dictWord{10, 10, 499}, - dictWord{139, 10, 670}, - dictWord{7, 0, 848}, - dictWord{4, 10, 22}, - dictWord{5, 10, 10}, - dictWord{136, 10, 97}, - dictWord{ - 138, - 0, - 507, - }, - dictWord{133, 10, 481}, - dictWord{4, 0, 188}, - dictWord{135, 0, 805}, - dictWord{5, 0, 884}, - dictWord{6, 0, 732}, - dictWord{139, 0, 991}, - dictWord{ - 135, - 11, - 968, - }, - dictWord{11, 11, 636}, - dictWord{15, 11, 145}, - dictWord{17, 11, 34}, - dictWord{19, 11, 50}, - dictWord{151, 11, 20}, - dictWord{7, 0, 959}, - dictWord{ - 16, - 0, - 60, - }, - dictWord{6, 10, 134}, - dictWord{7, 10, 437}, - dictWord{9, 10, 37}, - dictWord{14, 10, 285}, - dictWord{142, 10, 371}, - dictWord{7, 10, 486}, - dictWord{ - 8, - 10, - 155, - }, - dictWord{11, 10, 93}, - dictWord{140, 10, 164}, - dictWord{134, 0, 1653}, - dictWord{7, 0, 337}, - dictWord{133, 10, 591}, - dictWord{6, 0, 1989}, - dictWord{ - 8, - 0, - 922, - }, - dictWord{8, 0, 978}, - dictWord{133, 11, 374}, - dictWord{132, 0, 638}, - dictWord{138, 0, 500}, - dictWord{133, 11, 731}, - dictWord{5, 10, 380}, - dictWord{ - 5, - 10, - 650, - }, - dictWord{136, 10, 310}, - dictWord{138, 11, 381}, - 
dictWord{4, 10, 364}, - dictWord{7, 10, 1156}, - dictWord{7, 10, 1187}, - dictWord{137, 10, 409}, - dictWord{137, 11, 224}, - dictWord{140, 0, 166}, - dictWord{134, 10, 482}, - dictWord{4, 11, 626}, - dictWord{5, 11, 642}, - dictWord{6, 11, 425}, - dictWord{ - 10, - 11, - 202, - }, - dictWord{139, 11, 141}, - dictWord{4, 10, 781}, - dictWord{6, 10, 487}, - dictWord{7, 10, 926}, - dictWord{8, 10, 263}, - dictWord{139, 10, 500}, - dictWord{ - 135, - 0, - 418, - }, - dictWord{4, 10, 94}, - dictWord{135, 10, 1265}, - dictWord{136, 0, 760}, - dictWord{132, 10, 417}, - dictWord{136, 11, 835}, - dictWord{5, 10, 348}, - dictWord{134, 10, 522}, - dictWord{6, 0, 1277}, - dictWord{134, 0, 1538}, - dictWord{139, 11, 541}, - dictWord{135, 11, 1597}, - dictWord{5, 11, 384}, - dictWord{ - 8, - 11, - 455, - }, - dictWord{140, 11, 48}, - dictWord{136, 0, 770}, - dictWord{5, 11, 264}, - dictWord{134, 11, 184}, - dictWord{4, 0, 89}, - dictWord{5, 0, 489}, - dictWord{ - 6, - 0, - 315, - }, - dictWord{7, 0, 553}, - dictWord{7, 0, 1745}, - dictWord{138, 0, 243}, - dictWord{4, 10, 408}, - dictWord{4, 10, 741}, - dictWord{135, 10, 500}, - dictWord{ - 134, - 0, - 1396, - }, - dictWord{133, 0, 560}, - dictWord{6, 0, 1658}, - dictWord{9, 0, 3}, - dictWord{10, 0, 154}, - dictWord{11, 0, 641}, - dictWord{13, 0, 85}, - dictWord{13, 0, 201}, - dictWord{141, 0, 346}, - dictWord{135, 11, 1595}, - dictWord{5, 11, 633}, - dictWord{6, 11, 28}, - dictWord{7, 11, 219}, - dictWord{135, 11, 1323}, - dictWord{ - 9, - 11, - 769, - }, - dictWord{140, 11, 185}, - dictWord{135, 11, 785}, - dictWord{7, 11, 359}, - dictWord{8, 11, 243}, - dictWord{140, 11, 175}, - dictWord{138, 0, 586}, - dictWord{ - 7, - 0, - 1271, - }, - dictWord{134, 10, 73}, - dictWord{132, 11, 105}, - dictWord{4, 0, 166}, - dictWord{5, 0, 505}, - dictWord{134, 0, 1670}, - dictWord{133, 10, 576}, - dictWord{4, 11, 324}, - dictWord{138, 11, 104}, - dictWord{142, 10, 231}, - dictWord{6, 0, 637}, - dictWord{7, 10, 1264}, - dictWord{7, 10, 1678}, - dictWord{ - 11, - 10, - 945, - }, - dictWord{12, 10, 341}, - dictWord{12, 10, 471}, - dictWord{12, 10, 569}, - dictWord{23, 11, 21}, - dictWord{151, 11, 23}, - dictWord{8, 11, 559}, - dictWord{ - 141, - 11, - 109, - }, - dictWord{134, 0, 1947}, - dictWord{7, 0, 445}, - dictWord{8, 0, 307}, - dictWord{8, 0, 704}, - dictWord{10, 0, 41}, - dictWord{10, 0, 439}, - dictWord{ - 11, - 0, - 237, - }, - dictWord{11, 0, 622}, - dictWord{140, 0, 201}, - dictWord{135, 11, 963}, - dictWord{135, 0, 1977}, - dictWord{4, 0, 189}, - dictWord{5, 0, 713}, - dictWord{ - 136, - 0, - 57, - }, - dictWord{138, 0, 371}, - dictWord{135, 10, 538}, - dictWord{132, 0, 552}, - dictWord{6, 0, 883}, - dictWord{133, 10, 413}, - dictWord{6, 0, 923}, - dictWord{ - 132, - 11, - 758, - }, - dictWord{138, 11, 215}, - dictWord{136, 10, 495}, - dictWord{7, 10, 54}, - dictWord{8, 10, 312}, - dictWord{10, 10, 191}, - dictWord{10, 10, 614}, - dictWord{140, 10, 567}, - dictWord{7, 11, 351}, - dictWord{139, 11, 128}, - dictWord{7, 0, 875}, - dictWord{6, 10, 468}, - dictWord{7, 10, 1478}, - dictWord{8, 10, 530}, - dictWord{142, 10, 290}, - dictWord{135, 0, 1788}, - dictWord{17, 0, 49}, - dictWord{133, 11, 918}, - dictWord{12, 11, 398}, - dictWord{20, 11, 39}, - dictWord{ - 21, - 11, - 11, - }, - dictWord{150, 11, 41}, - dictWord{10, 0, 661}, - dictWord{6, 10, 484}, - dictWord{135, 10, 822}, - dictWord{135, 0, 1945}, - dictWord{134, 0, 794}, - dictWord{ - 137, - 10, - 900, - }, - dictWord{135, 10, 1335}, - dictWord{6, 10, 1724}, - dictWord{135, 10, 2022}, - dictWord{132, 
11, 340}, - dictWord{134, 0, 1135}, - dictWord{ - 4, - 0, - 784, - }, - dictWord{133, 0, 745}, - dictWord{5, 0, 84}, - dictWord{134, 0, 163}, - dictWord{133, 0, 410}, - dictWord{4, 0, 976}, - dictWord{5, 11, 985}, - dictWord{7, 11, 509}, - dictWord{7, 11, 529}, - dictWord{145, 11, 96}, - dictWord{132, 10, 474}, - dictWord{134, 0, 703}, - dictWord{135, 11, 1919}, - dictWord{5, 0, 322}, - dictWord{ - 8, - 0, - 186, - }, - dictWord{9, 0, 262}, - dictWord{10, 0, 187}, - dictWord{142, 0, 208}, - dictWord{135, 10, 1504}, - dictWord{133, 0, 227}, - dictWord{9, 0, 560}, - dictWord{ - 13, - 0, - 208, - }, - dictWord{133, 10, 305}, - dictWord{132, 11, 247}, - dictWord{7, 0, 1395}, - dictWord{8, 0, 486}, - dictWord{9, 0, 236}, - dictWord{9, 0, 878}, - dictWord{ - 10, - 0, - 218, - }, - dictWord{11, 0, 95}, - dictWord{19, 0, 17}, - dictWord{147, 0, 31}, - dictWord{7, 0, 2043}, - dictWord{8, 0, 672}, - dictWord{141, 0, 448}, - dictWord{4, 11, 184}, - dictWord{5, 11, 390}, - dictWord{6, 11, 337}, - dictWord{7, 11, 23}, - dictWord{7, 11, 494}, - dictWord{7, 11, 618}, - dictWord{7, 11, 1456}, - dictWord{8, 11, 27}, - dictWord{ - 8, - 11, - 599, - }, - dictWord{10, 11, 153}, - dictWord{139, 11, 710}, - dictWord{135, 0, 466}, - dictWord{135, 10, 1236}, - dictWord{6, 0, 167}, - dictWord{7, 0, 186}, - dictWord{7, 0, 656}, - dictWord{10, 0, 643}, - dictWord{4, 10, 480}, - dictWord{6, 10, 302}, - dictWord{6, 10, 1642}, - dictWord{7, 10, 837}, - dictWord{7, 10, 1547}, - dictWord{ - 7, - 10, - 1657, - }, - dictWord{8, 10, 429}, - dictWord{9, 10, 228}, - dictWord{13, 10, 289}, - dictWord{13, 10, 343}, - dictWord{147, 10, 101}, - dictWord{134, 0, 1428}, - dictWord{134, 0, 1440}, - dictWord{5, 0, 412}, - dictWord{7, 10, 278}, - dictWord{10, 10, 739}, - dictWord{11, 10, 708}, - dictWord{141, 10, 348}, - dictWord{ - 134, - 0, - 1118, - }, - dictWord{136, 0, 562}, - dictWord{148, 11, 46}, - dictWord{9, 0, 316}, - dictWord{139, 0, 256}, - dictWord{134, 0, 1771}, - dictWord{135, 0, 1190}, - dictWord{137, 0, 132}, - dictWord{10, 11, 227}, - dictWord{11, 11, 497}, - dictWord{11, 11, 709}, - dictWord{140, 11, 415}, - dictWord{143, 0, 66}, - dictWord{6, 11, 360}, - dictWord{7, 11, 1664}, - dictWord{136, 11, 478}, - dictWord{144, 10, 28}, - dictWord{4, 0, 317}, - dictWord{135, 0, 1279}, - dictWord{5, 0, 63}, - dictWord{ - 133, - 0, - 509, - }, - dictWord{136, 11, 699}, - dictWord{145, 10, 36}, - dictWord{134, 0, 1475}, - dictWord{11, 11, 343}, - dictWord{142, 11, 127}, - dictWord{132, 11, 739}, - dictWord{132, 0, 288}, - dictWord{135, 11, 1757}, - dictWord{8, 0, 89}, - dictWord{8, 0, 620}, - dictWord{9, 0, 608}, - dictWord{11, 0, 628}, - dictWord{12, 0, 322}, - dictWord{143, 0, 124}, - dictWord{134, 0, 1225}, - dictWord{7, 0, 1189}, - dictWord{4, 11, 67}, - dictWord{5, 11, 422}, - dictWord{6, 10, 363}, - dictWord{7, 11, 1037}, - dictWord{7, 11, 1289}, - dictWord{7, 11, 1555}, - dictWord{7, 10, 1955}, - dictWord{8, 10, 725}, - dictWord{9, 11, 741}, - dictWord{145, 11, 108}, - dictWord{ - 134, - 0, - 1468, - }, - dictWord{6, 0, 689}, - dictWord{134, 0, 1451}, - dictWord{138, 0, 120}, - dictWord{151, 0, 1}, - dictWord{137, 10, 805}, - dictWord{142, 0, 329}, - dictWord{ - 5, - 10, - 813, - }, - dictWord{135, 10, 2046}, - dictWord{135, 0, 226}, - dictWord{138, 11, 96}, - dictWord{7, 0, 1855}, - dictWord{5, 10, 712}, - dictWord{11, 10, 17}, - dictWord{13, 10, 321}, - dictWord{144, 10, 67}, - dictWord{9, 0, 461}, - dictWord{6, 10, 320}, - dictWord{7, 10, 781}, - dictWord{7, 10, 1921}, - dictWord{9, 10, 55}, - dictWord{ - 10, - 10, - 
186, - }, - dictWord{10, 10, 273}, - dictWord{10, 10, 664}, - dictWord{10, 10, 801}, - dictWord{11, 10, 996}, - dictWord{11, 10, 997}, - dictWord{13, 10, 157}, - dictWord{142, 10, 170}, - dictWord{8, 11, 203}, - dictWord{8, 10, 271}, - dictWord{11, 11, 823}, - dictWord{11, 11, 846}, - dictWord{12, 11, 482}, - dictWord{ - 13, - 11, - 133, - }, - dictWord{13, 11, 277}, - dictWord{13, 11, 302}, - dictWord{13, 11, 464}, - dictWord{14, 11, 205}, - dictWord{142, 11, 221}, - dictWord{135, 0, 1346}, - dictWord{4, 11, 449}, - dictWord{133, 11, 718}, - dictWord{134, 0, 85}, - dictWord{14, 0, 299}, - dictWord{7, 10, 103}, - dictWord{7, 10, 863}, - dictWord{11, 10, 184}, - dictWord{145, 10, 62}, - dictWord{4, 11, 355}, - dictWord{6, 11, 311}, - dictWord{9, 11, 256}, - dictWord{138, 11, 404}, - dictWord{137, 10, 659}, - dictWord{ - 138, - 11, - 758, - }, - dictWord{133, 11, 827}, - dictWord{5, 11, 64}, - dictWord{140, 11, 581}, - dictWord{134, 0, 1171}, - dictWord{4, 11, 442}, - dictWord{7, 11, 1047}, - dictWord{ - 7, - 11, - 1352, - }, - dictWord{135, 11, 1643}, - dictWord{132, 0, 980}, - dictWord{5, 11, 977}, - dictWord{6, 11, 288}, - dictWord{7, 11, 528}, - dictWord{135, 11, 1065}, - dictWord{5, 0, 279}, - dictWord{6, 0, 235}, - dictWord{7, 0, 468}, - dictWord{8, 0, 446}, - dictWord{9, 0, 637}, - dictWord{10, 0, 717}, - dictWord{11, 0, 738}, - dictWord{ - 140, - 0, - 514, - }, - dictWord{132, 0, 293}, - dictWord{11, 10, 337}, - dictWord{142, 10, 303}, - dictWord{136, 11, 285}, - dictWord{5, 0, 17}, - dictWord{6, 0, 371}, - dictWord{ - 9, - 0, - 528, - }, - dictWord{12, 0, 364}, - dictWord{132, 11, 254}, - dictWord{5, 10, 77}, - dictWord{7, 10, 1455}, - dictWord{10, 10, 843}, - dictWord{147, 10, 73}, - dictWord{ - 150, - 0, - 5, - }, - dictWord{132, 10, 458}, - dictWord{6, 11, 12}, - dictWord{7, 11, 1219}, - dictWord{145, 11, 73}, - dictWord{135, 10, 1420}, - dictWord{6, 10, 109}, - dictWord{138, 10, 382}, - dictWord{135, 11, 125}, - dictWord{6, 10, 330}, - dictWord{7, 10, 1084}, - dictWord{139, 10, 142}, - dictWord{6, 11, 369}, - dictWord{ - 6, - 11, - 502, - }, - dictWord{7, 11, 1036}, - dictWord{8, 11, 348}, - dictWord{9, 11, 452}, - dictWord{10, 11, 26}, - dictWord{11, 11, 224}, - dictWord{11, 11, 387}, - dictWord{ - 11, - 11, - 772, - }, - dictWord{12, 11, 95}, - dictWord{12, 11, 629}, - dictWord{13, 11, 195}, - dictWord{13, 11, 207}, - dictWord{13, 11, 241}, - dictWord{14, 11, 260}, - dictWord{ - 14, - 11, - 270, - }, - dictWord{143, 11, 140}, - dictWord{132, 11, 269}, - dictWord{5, 11, 480}, - dictWord{7, 11, 532}, - dictWord{7, 11, 1197}, - dictWord{7, 11, 1358}, - dictWord{8, 11, 291}, - dictWord{11, 11, 349}, - dictWord{142, 11, 396}, - dictWord{150, 0, 48}, - dictWord{10, 0, 601}, - dictWord{13, 0, 353}, - dictWord{141, 0, 376}, - dictWord{5, 0, 779}, - dictWord{5, 0, 807}, - dictWord{6, 0, 1655}, - dictWord{134, 0, 1676}, - dictWord{142, 11, 223}, - dictWord{4, 0, 196}, - dictWord{5, 0, 558}, - dictWord{133, 0, 949}, - dictWord{148, 11, 15}, - dictWord{135, 11, 1764}, - dictWord{134, 0, 1322}, - dictWord{132, 0, 752}, - dictWord{139, 0, 737}, - dictWord{ - 135, - 11, - 657, - }, - dictWord{136, 11, 533}, - dictWord{135, 0, 412}, - dictWord{4, 0, 227}, - dictWord{5, 0, 159}, - dictWord{5, 0, 409}, - dictWord{7, 0, 80}, - dictWord{8, 0, 556}, - dictWord{10, 0, 479}, - dictWord{12, 0, 418}, - dictWord{14, 0, 50}, - dictWord{14, 0, 123}, - dictWord{14, 0, 192}, - dictWord{14, 0, 249}, - dictWord{14, 0, 295}, - dictWord{143, 0, 27}, - dictWord{7, 0, 1470}, - dictWord{8, 0, 66}, - dictWord{8, 
0, 137}, - dictWord{8, 0, 761}, - dictWord{9, 0, 638}, - dictWord{11, 0, 80}, - dictWord{11, 0, 212}, - dictWord{11, 0, 368}, - dictWord{11, 0, 418}, - dictWord{12, 0, 8}, - dictWord{13, 0, 15}, - dictWord{16, 0, 61}, - dictWord{17, 0, 59}, - dictWord{19, 0, 28}, - dictWord{ - 148, - 0, - 84, - }, - dictWord{135, 10, 1985}, - dictWord{4, 11, 211}, - dictWord{4, 11, 332}, - dictWord{5, 11, 335}, - dictWord{6, 11, 238}, - dictWord{7, 11, 269}, - dictWord{ - 7, - 11, - 811, - }, - dictWord{7, 11, 1797}, - dictWord{8, 10, 122}, - dictWord{8, 11, 836}, - dictWord{9, 11, 507}, - dictWord{141, 11, 242}, - dictWord{6, 0, 683}, - dictWord{ - 134, - 0, - 1252, - }, - dictWord{4, 0, 873}, - dictWord{132, 10, 234}, - dictWord{134, 0, 835}, - dictWord{6, 0, 38}, - dictWord{7, 0, 1220}, - dictWord{8, 0, 185}, - dictWord{8, 0, 256}, - dictWord{9, 0, 22}, - dictWord{9, 0, 331}, - dictWord{10, 0, 738}, - dictWord{11, 0, 205}, - dictWord{11, 0, 540}, - dictWord{11, 0, 746}, - dictWord{13, 0, 465}, - dictWord{ - 14, - 0, - 88, - }, - dictWord{142, 0, 194}, - dictWord{138, 0, 986}, - dictWord{5, 11, 1009}, - dictWord{12, 11, 582}, - dictWord{146, 11, 131}, - dictWord{4, 0, 159}, - dictWord{ - 6, - 0, - 115, - }, - dictWord{7, 0, 252}, - dictWord{7, 0, 257}, - dictWord{7, 0, 1928}, - dictWord{8, 0, 69}, - dictWord{9, 0, 384}, - dictWord{10, 0, 91}, - dictWord{10, 0, 615}, - dictWord{ - 12, - 0, - 375, - }, - dictWord{14, 0, 235}, - dictWord{18, 0, 117}, - dictWord{147, 0, 123}, - dictWord{133, 0, 911}, - dictWord{136, 0, 278}, - dictWord{5, 10, 430}, - dictWord{ - 5, - 10, - 932, - }, - dictWord{6, 10, 131}, - dictWord{7, 10, 417}, - dictWord{9, 10, 522}, - dictWord{11, 10, 314}, - dictWord{141, 10, 390}, - dictWord{14, 10, 149}, - dictWord{14, 10, 399}, - dictWord{143, 10, 57}, - dictWord{4, 0, 151}, - dictWord{7, 0, 1567}, - dictWord{136, 0, 749}, - dictWord{5, 11, 228}, - dictWord{6, 11, 203}, - dictWord{ - 7, - 11, - 156, - }, - dictWord{8, 11, 347}, - dictWord{137, 11, 265}, - dictWord{132, 10, 507}, - dictWord{10, 0, 989}, - dictWord{140, 0, 956}, - dictWord{133, 0, 990}, - dictWord{5, 0, 194}, - dictWord{6, 0, 927}, - dictWord{7, 0, 1662}, - dictWord{9, 0, 90}, - dictWord{140, 0, 564}, - dictWord{4, 10, 343}, - dictWord{133, 10, 511}, - dictWord{133, 0, 425}, - dictWord{7, 10, 455}, - dictWord{138, 10, 591}, - dictWord{4, 0, 774}, - dictWord{7, 11, 476}, - dictWord{7, 11, 1592}, - dictWord{138, 11, 87}, - dictWord{5, 0, 971}, - dictWord{135, 10, 1381}, - dictWord{5, 11, 318}, - dictWord{147, 11, 121}, - dictWord{5, 11, 291}, - dictWord{7, 11, 765}, - dictWord{9, 11, 389}, - dictWord{140, 11, 548}, - dictWord{134, 10, 575}, - dictWord{4, 0, 827}, - dictWord{12, 0, 646}, - dictWord{12, 0, 705}, - dictWord{12, 0, 712}, - dictWord{140, 0, 714}, - dictWord{139, 0, 752}, - dictWord{137, 0, 662}, - dictWord{5, 0, 72}, - dictWord{6, 0, 264}, - dictWord{7, 0, 21}, - dictWord{7, 0, 46}, - dictWord{7, 0, 2013}, - dictWord{ - 8, - 0, - 215, - }, - dictWord{8, 0, 513}, - dictWord{10, 0, 266}, - dictWord{139, 0, 22}, - dictWord{139, 11, 522}, - dictWord{6, 0, 239}, - dictWord{7, 0, 118}, - dictWord{10, 0, 95}, - dictWord{11, 0, 603}, - dictWord{13, 0, 443}, - dictWord{14, 0, 160}, - dictWord{143, 0, 4}, - dictWord{6, 0, 431}, - dictWord{134, 0, 669}, - dictWord{7, 10, 1127}, - dictWord{ - 7, - 10, - 1572, - }, - dictWord{10, 10, 297}, - dictWord{10, 10, 422}, - dictWord{11, 10, 764}, - dictWord{11, 10, 810}, - dictWord{12, 10, 264}, - dictWord{13, 10, 102}, - dictWord{13, 10, 300}, - dictWord{13, 10, 484}, - 
dictWord{14, 10, 147}, - dictWord{14, 10, 229}, - dictWord{17, 10, 71}, - dictWord{18, 10, 118}, - dictWord{ - 147, - 10, - 120, - }, - dictWord{5, 0, 874}, - dictWord{6, 0, 1677}, - dictWord{15, 0, 0}, - dictWord{10, 11, 525}, - dictWord{139, 11, 82}, - dictWord{6, 0, 65}, - dictWord{7, 0, 939}, - dictWord{ - 7, - 0, - 1172, - }, - dictWord{7, 0, 1671}, - dictWord{9, 0, 540}, - dictWord{10, 0, 696}, - dictWord{11, 0, 265}, - dictWord{11, 0, 732}, - dictWord{11, 0, 928}, - dictWord{ - 11, - 0, - 937, - }, - dictWord{141, 0, 438}, - dictWord{134, 0, 1350}, - dictWord{136, 11, 547}, - dictWord{132, 11, 422}, - dictWord{5, 11, 355}, - dictWord{145, 11, 0}, - dictWord{137, 11, 905}, - dictWord{5, 0, 682}, - dictWord{135, 0, 1887}, - dictWord{132, 0, 809}, - dictWord{4, 0, 696}, - dictWord{133, 11, 865}, - dictWord{6, 0, 1074}, - dictWord{6, 0, 1472}, - dictWord{14, 10, 35}, - dictWord{142, 10, 191}, - dictWord{5, 11, 914}, - dictWord{134, 11, 1625}, - dictWord{133, 11, 234}, - dictWord{ - 135, - 11, - 1383, - }, - dictWord{137, 11, 780}, - dictWord{132, 10, 125}, - dictWord{4, 0, 726}, - dictWord{133, 0, 630}, - dictWord{8, 0, 802}, - dictWord{136, 0, 838}, - dictWord{132, 10, 721}, - dictWord{6, 0, 1337}, - dictWord{7, 0, 776}, - dictWord{19, 0, 56}, - dictWord{136, 10, 145}, - dictWord{132, 0, 970}, - dictWord{7, 10, 792}, - dictWord{8, 10, 147}, - dictWord{10, 10, 821}, - dictWord{139, 10, 1021}, - dictWord{139, 10, 970}, - dictWord{8, 0, 940}, - dictWord{137, 0, 797}, - dictWord{ - 135, - 11, - 1312, - }, - dictWord{9, 0, 248}, - dictWord{10, 0, 400}, - dictWord{7, 11, 816}, - dictWord{7, 11, 1241}, - dictWord{7, 10, 1999}, - dictWord{9, 11, 283}, - dictWord{ - 9, - 11, - 520, - }, - dictWord{10, 11, 213}, - dictWord{10, 11, 307}, - dictWord{10, 11, 463}, - dictWord{10, 11, 671}, - dictWord{10, 11, 746}, - dictWord{11, 11, 401}, - dictWord{ - 11, - 11, - 794, - }, - dictWord{12, 11, 517}, - dictWord{18, 11, 107}, - dictWord{147, 11, 115}, - dictWord{6, 0, 1951}, - dictWord{134, 0, 2040}, - dictWord{ - 135, - 11, - 339, - }, - dictWord{13, 0, 41}, - dictWord{15, 0, 93}, - dictWord{5, 10, 168}, - dictWord{5, 10, 930}, - dictWord{8, 10, 74}, - dictWord{9, 10, 623}, - dictWord{12, 10, 500}, - dictWord{140, 10, 579}, - dictWord{6, 0, 118}, - dictWord{7, 0, 215}, - dictWord{7, 0, 1521}, - dictWord{140, 0, 11}, - dictWord{6, 10, 220}, - dictWord{7, 10, 1101}, - dictWord{141, 10, 105}, - dictWord{6, 11, 421}, - dictWord{7, 11, 61}, - dictWord{7, 11, 1540}, - dictWord{10, 11, 11}, - dictWord{138, 11, 501}, - dictWord{7, 0, 615}, - dictWord{138, 0, 251}, - dictWord{140, 11, 631}, - dictWord{135, 0, 1044}, - dictWord{6, 10, 19}, - dictWord{7, 10, 1413}, - dictWord{139, 10, 428}, - dictWord{ - 133, - 0, - 225, - }, - dictWord{7, 10, 96}, - dictWord{8, 10, 401}, - dictWord{8, 10, 703}, - dictWord{137, 10, 896}, - dictWord{145, 10, 116}, - dictWord{6, 11, 102}, - dictWord{ - 7, - 11, - 72, - }, - dictWord{15, 11, 142}, - dictWord{147, 11, 67}, - dictWord{7, 10, 1961}, - dictWord{7, 10, 1965}, - dictWord{8, 10, 702}, - dictWord{136, 10, 750}, - dictWord{ - 7, - 10, - 2030, - }, - dictWord{8, 10, 150}, - dictWord{8, 10, 737}, - dictWord{12, 10, 366}, - dictWord{151, 11, 30}, - dictWord{4, 0, 370}, - dictWord{5, 0, 756}, - dictWord{ - 7, - 0, - 1326, - }, - dictWord{135, 11, 823}, - dictWord{8, 10, 800}, - dictWord{9, 10, 148}, - dictWord{9, 10, 872}, - dictWord{9, 10, 890}, - dictWord{11, 10, 309}, - dictWord{ - 11, - 10, - 1001, - }, - dictWord{13, 10, 267}, - dictWord{141, 10, 323}, - dictWord{6, 0, 
1662}, - dictWord{7, 0, 48}, - dictWord{8, 0, 771}, - dictWord{10, 0, 116}, - dictWord{ - 13, - 0, - 104, - }, - dictWord{14, 0, 105}, - dictWord{14, 0, 184}, - dictWord{15, 0, 168}, - dictWord{19, 0, 92}, - dictWord{148, 0, 68}, - dictWord{10, 0, 209}, - dictWord{ - 135, - 11, - 1870, - }, - dictWord{7, 11, 68}, - dictWord{8, 11, 48}, - dictWord{8, 11, 88}, - dictWord{8, 11, 582}, - dictWord{8, 11, 681}, - dictWord{9, 11, 373}, - dictWord{9, 11, 864}, - dictWord{11, 11, 157}, - dictWord{11, 11, 336}, - dictWord{11, 11, 843}, - dictWord{148, 11, 27}, - dictWord{134, 0, 930}, - dictWord{4, 11, 88}, - dictWord{5, 11, 137}, - dictWord{5, 11, 174}, - dictWord{5, 11, 777}, - dictWord{6, 11, 1664}, - dictWord{6, 11, 1725}, - dictWord{7, 11, 77}, - dictWord{7, 11, 426}, - dictWord{7, 11, 1317}, - dictWord{7, 11, 1355}, - dictWord{8, 11, 126}, - dictWord{8, 11, 563}, - dictWord{9, 11, 523}, - dictWord{9, 11, 750}, - dictWord{10, 11, 310}, - dictWord{10, 11, 836}, - dictWord{11, 11, 42}, - dictWord{11, 11, 318}, - dictWord{11, 11, 731}, - dictWord{12, 11, 68}, - dictWord{12, 11, 92}, - dictWord{12, 11, 507}, - dictWord{12, 11, 692}, - dictWord{13, 11, 81}, - dictWord{13, 11, 238}, - dictWord{13, 11, 374}, - dictWord{18, 11, 138}, - dictWord{19, 11, 78}, - dictWord{19, 11, 111}, - dictWord{20, 11, 55}, - dictWord{20, 11, 77}, - dictWord{148, 11, 92}, - dictWord{4, 11, 938}, - dictWord{135, 11, 1831}, - dictWord{5, 10, 547}, - dictWord{7, 10, 424}, - dictWord{ - 8, - 11, - 617, - }, - dictWord{138, 11, 351}, - dictWord{6, 0, 1286}, - dictWord{6, 11, 1668}, - dictWord{7, 11, 1499}, - dictWord{8, 11, 117}, - dictWord{9, 11, 314}, - dictWord{ - 138, - 11, - 174, - }, - dictWord{6, 0, 759}, - dictWord{6, 0, 894}, - dictWord{7, 11, 707}, - dictWord{139, 11, 563}, - dictWord{4, 0, 120}, - dictWord{135, 0, 1894}, - dictWord{ - 9, - 0, - 385, - }, - dictWord{149, 0, 17}, - dictWord{138, 0, 429}, - dictWord{133, 11, 403}, - dictWord{5, 0, 820}, - dictWord{135, 0, 931}, - dictWord{10, 0, 199}, - dictWord{ - 133, - 10, - 133, - }, - dictWord{6, 0, 151}, - dictWord{6, 0, 1675}, - dictWord{7, 0, 383}, - dictWord{151, 0, 10}, - dictWord{6, 0, 761}, - dictWord{136, 10, 187}, - dictWord{ - 8, - 0, - 365, - }, - dictWord{10, 10, 0}, - dictWord{10, 10, 818}, - dictWord{139, 10, 988}, - dictWord{4, 11, 44}, - dictWord{5, 11, 311}, - dictWord{6, 11, 156}, - dictWord{ - 7, - 11, - 639, - }, - dictWord{7, 11, 762}, - dictWord{7, 11, 1827}, - dictWord{9, 11, 8}, - dictWord{9, 11, 462}, - dictWord{148, 11, 83}, - dictWord{4, 11, 346}, - dictWord{7, 11, 115}, - dictWord{9, 11, 180}, - dictWord{9, 11, 456}, - dictWord{138, 11, 363}, - dictWord{136, 10, 685}, - dictWord{7, 0, 1086}, - dictWord{145, 0, 46}, - dictWord{ - 6, - 0, - 1624, - }, - dictWord{11, 0, 11}, - dictWord{12, 0, 422}, - dictWord{13, 0, 444}, - dictWord{142, 0, 360}, - dictWord{6, 0, 1020}, - dictWord{6, 0, 1260}, - dictWord{ - 134, - 0, - 1589, - }, - dictWord{4, 0, 43}, - dictWord{5, 0, 344}, - dictWord{5, 0, 357}, - dictWord{14, 0, 472}, - dictWord{150, 0, 58}, - dictWord{6, 0, 1864}, - dictWord{6, 0, 1866}, - dictWord{6, 0, 1868}, - dictWord{6, 0, 1869}, - dictWord{6, 0, 1874}, - dictWord{6, 0, 1877}, - dictWord{6, 0, 1903}, - dictWord{6, 0, 1911}, - dictWord{9, 0, 920}, - dictWord{ - 9, - 0, - 921, - }, - dictWord{9, 0, 924}, - dictWord{9, 0, 946}, - dictWord{9, 0, 959}, - dictWord{9, 0, 963}, - dictWord{9, 0, 970}, - dictWord{9, 0, 997}, - dictWord{9, 0, 1008}, - dictWord{ - 9, - 0, - 1017, - }, - dictWord{12, 0, 795}, - dictWord{12, 0, 797}, - 
dictWord{12, 0, 798}, - dictWord{12, 0, 800}, - dictWord{12, 0, 803}, - dictWord{12, 0, 811}, - dictWord{ - 12, - 0, - 820, - }, - dictWord{12, 0, 821}, - dictWord{12, 0, 839}, - dictWord{12, 0, 841}, - dictWord{12, 0, 848}, - dictWord{12, 0, 911}, - dictWord{12, 0, 921}, - dictWord{12, 0, 922}, - dictWord{12, 0, 925}, - dictWord{12, 0, 937}, - dictWord{12, 0, 944}, - dictWord{12, 0, 945}, - dictWord{12, 0, 953}, - dictWord{15, 0, 184}, - dictWord{15, 0, 191}, - dictWord{15, 0, 199}, - dictWord{15, 0, 237}, - dictWord{15, 0, 240}, - dictWord{15, 0, 243}, - dictWord{15, 0, 246}, - dictWord{18, 0, 203}, - dictWord{21, 0, 40}, - dictWord{ - 21, - 0, - 52, - }, - dictWord{21, 0, 57}, - dictWord{24, 0, 23}, - dictWord{24, 0, 28}, - dictWord{152, 0, 30}, - dictWord{134, 0, 725}, - dictWord{145, 11, 58}, - dictWord{133, 0, 888}, - dictWord{137, 10, 874}, - dictWord{4, 0, 711}, - dictWord{8, 10, 774}, - dictWord{10, 10, 670}, - dictWord{140, 10, 51}, - dictWord{144, 11, 40}, - dictWord{ - 6, - 11, - 185, - }, - dictWord{7, 11, 1899}, - dictWord{139, 11, 673}, - dictWord{137, 10, 701}, - dictWord{137, 0, 440}, - dictWord{4, 11, 327}, - dictWord{5, 11, 478}, - dictWord{ - 7, - 11, - 1332, - }, - dictWord{8, 11, 753}, - dictWord{140, 11, 227}, - dictWord{4, 10, 127}, - dictWord{5, 10, 350}, - dictWord{6, 10, 356}, - dictWord{8, 10, 426}, - dictWord{ - 9, - 10, - 572, - }, - dictWord{10, 10, 247}, - dictWord{139, 10, 312}, - dictWord{5, 11, 1020}, - dictWord{133, 11, 1022}, - dictWord{4, 11, 103}, - dictWord{ - 133, - 11, - 401, - }, - dictWord{6, 0, 1913}, - dictWord{6, 0, 1926}, - dictWord{6, 0, 1959}, - dictWord{9, 0, 914}, - dictWord{9, 0, 939}, - dictWord{9, 0, 952}, - dictWord{9, 0, 979}, - dictWord{ - 9, - 0, - 990, - }, - dictWord{9, 0, 998}, - dictWord{9, 0, 1003}, - dictWord{9, 0, 1023}, - dictWord{12, 0, 827}, - dictWord{12, 0, 834}, - dictWord{12, 0, 845}, - dictWord{ - 12, - 0, - 912, - }, - dictWord{12, 0, 935}, - dictWord{12, 0, 951}, - dictWord{15, 0, 172}, - dictWord{15, 0, 174}, - dictWord{18, 0, 198}, - dictWord{149, 0, 63}, - dictWord{5, 0, 958}, - dictWord{5, 0, 987}, - dictWord{4, 11, 499}, - dictWord{135, 11, 1421}, - dictWord{7, 0, 885}, - dictWord{6, 10, 59}, - dictWord{6, 10, 1762}, - dictWord{9, 10, 603}, - dictWord{141, 10, 397}, - dictWord{10, 11, 62}, - dictWord{141, 11, 164}, - dictWord{4, 0, 847}, - dictWord{135, 0, 326}, - dictWord{11, 0, 276}, - dictWord{142, 0, 293}, - dictWord{4, 0, 65}, - dictWord{5, 0, 479}, - dictWord{5, 0, 1004}, - dictWord{7, 0, 1913}, - dictWord{8, 0, 317}, - dictWord{9, 0, 302}, - dictWord{10, 0, 612}, - dictWord{ - 13, - 0, - 22, - }, - dictWord{132, 11, 96}, - dictWord{4, 0, 261}, - dictWord{135, 0, 510}, - dictWord{135, 0, 1514}, - dictWord{6, 10, 111}, - dictWord{7, 10, 4}, - dictWord{8, 10, 163}, - dictWord{8, 10, 776}, - dictWord{138, 10, 566}, - dictWord{4, 0, 291}, - dictWord{9, 0, 515}, - dictWord{12, 0, 152}, - dictWord{12, 0, 443}, - dictWord{13, 0, 392}, - dictWord{142, 0, 357}, - dictWord{7, 11, 399}, - dictWord{135, 11, 1492}, - dictWord{4, 0, 589}, - dictWord{139, 0, 282}, - dictWord{6, 11, 563}, - dictWord{ - 135, - 10, - 1994, - }, - dictWord{5, 10, 297}, - dictWord{135, 10, 1038}, - dictWord{4, 0, 130}, - dictWord{7, 0, 843}, - dictWord{135, 0, 1562}, - dictWord{5, 0, 42}, - dictWord{ - 5, - 0, - 879, - }, - dictWord{7, 0, 245}, - dictWord{7, 0, 324}, - dictWord{7, 0, 1532}, - dictWord{11, 0, 463}, - dictWord{11, 0, 472}, - dictWord{13, 0, 363}, - dictWord{144, 0, 52}, - dictWord{4, 0, 134}, - dictWord{133, 0, 372}, - 
dictWord{133, 0, 680}, - dictWord{136, 10, 363}, - dictWord{6, 0, 1997}, - dictWord{8, 0, 935}, - dictWord{136, 0, 977}, - dictWord{4, 0, 810}, - dictWord{135, 0, 1634}, - dictWord{135, 10, 1675}, - dictWord{7, 0, 1390}, - dictWord{4, 11, 910}, - dictWord{133, 11, 832}, - dictWord{ - 7, - 10, - 808, - }, - dictWord{8, 11, 266}, - dictWord{139, 11, 578}, - dictWord{132, 0, 644}, - dictWord{4, 0, 982}, - dictWord{138, 0, 867}, - dictWord{132, 10, 280}, - dictWord{ - 135, - 0, - 540, - }, - dictWord{140, 10, 54}, - dictWord{135, 0, 123}, - dictWord{134, 0, 1978}, - dictWord{4, 10, 421}, - dictWord{133, 10, 548}, - dictWord{6, 0, 623}, - dictWord{136, 0, 789}, - dictWord{4, 0, 908}, - dictWord{5, 0, 359}, - dictWord{5, 0, 508}, - dictWord{6, 0, 1723}, - dictWord{7, 0, 343}, - dictWord{7, 0, 1996}, - dictWord{ - 135, - 0, - 2026, - }, - dictWord{134, 0, 1220}, - dictWord{4, 0, 341}, - dictWord{135, 0, 480}, - dictWord{6, 10, 254}, - dictWord{9, 10, 109}, - dictWord{138, 10, 103}, - dictWord{ - 134, - 0, - 888, - }, - dictWord{8, 11, 528}, - dictWord{137, 11, 348}, - dictWord{7, 0, 1995}, - dictWord{8, 0, 299}, - dictWord{11, 0, 890}, - dictWord{12, 0, 674}, - dictWord{ - 4, - 11, - 20, - }, - dictWord{133, 11, 616}, - dictWord{135, 11, 1094}, - dictWord{134, 10, 1630}, - dictWord{4, 0, 238}, - dictWord{5, 0, 503}, - dictWord{6, 0, 179}, - dictWord{ - 7, - 0, - 2003, - }, - dictWord{8, 0, 381}, - dictWord{8, 0, 473}, - dictWord{9, 0, 149}, - dictWord{10, 0, 788}, - dictWord{15, 0, 45}, - dictWord{15, 0, 86}, - dictWord{20, 0, 110}, - dictWord{150, 0, 57}, - dictWord{133, 10, 671}, - dictWord{4, 11, 26}, - dictWord{5, 11, 429}, - dictWord{6, 11, 245}, - dictWord{7, 11, 704}, - dictWord{7, 11, 1379}, - dictWord{135, 11, 1474}, - dictWord{4, 0, 121}, - dictWord{5, 0, 156}, - dictWord{5, 0, 349}, - dictWord{9, 0, 431}, - dictWord{10, 0, 605}, - dictWord{142, 0, 342}, - dictWord{ - 7, - 11, - 943, - }, - dictWord{139, 11, 614}, - dictWord{132, 10, 889}, - dictWord{132, 11, 621}, - dictWord{7, 10, 1382}, - dictWord{7, 11, 1382}, - dictWord{ - 135, - 10, - 1910, - }, - dictWord{132, 10, 627}, - dictWord{133, 10, 775}, - dictWord{133, 11, 542}, - dictWord{133, 11, 868}, - dictWord{136, 11, 433}, - dictWord{6, 0, 1373}, - dictWord{7, 0, 1011}, - dictWord{11, 10, 362}, - dictWord{11, 10, 948}, - dictWord{140, 10, 388}, - dictWord{6, 0, 80}, - dictWord{7, 0, 173}, - dictWord{9, 0, 547}, - dictWord{10, 0, 730}, - dictWord{14, 0, 18}, - dictWord{22, 0, 39}, - dictWord{135, 11, 1495}, - dictWord{6, 0, 1694}, - dictWord{135, 0, 1974}, - dictWord{140, 0, 196}, - dictWord{4, 0, 923}, - dictWord{6, 0, 507}, - dictWord{6, 0, 1711}, - dictWord{7, 10, 451}, - dictWord{8, 10, 389}, - dictWord{12, 10, 490}, - dictWord{13, 10, 16}, - dictWord{ - 13, - 10, - 215, - }, - dictWord{13, 10, 351}, - dictWord{18, 10, 132}, - dictWord{147, 10, 125}, - dictWord{6, 0, 646}, - dictWord{134, 0, 1047}, - dictWord{135, 10, 841}, - dictWord{136, 10, 566}, - dictWord{6, 0, 1611}, - dictWord{135, 0, 1214}, - dictWord{139, 0, 926}, - dictWord{132, 11, 525}, - dictWord{132, 0, 595}, - dictWord{ - 5, - 0, - 240, - }, - dictWord{6, 0, 459}, - dictWord{7, 0, 12}, - dictWord{7, 0, 114}, - dictWord{7, 0, 949}, - dictWord{7, 0, 1753}, - dictWord{7, 0, 1805}, - dictWord{8, 0, 658}, - dictWord{ - 9, - 0, - 1, - }, - dictWord{11, 0, 959}, - dictWord{141, 0, 446}, - dictWord{5, 10, 912}, - dictWord{134, 10, 1695}, - dictWord{132, 0, 446}, - dictWord{7, 11, 62}, - dictWord{ - 12, - 11, - 45, - }, - dictWord{147, 11, 112}, - dictWord{5, 10, 236}, - 
[elided: deletion hunk removing thousands of machine-generated `dictWord{...}` static-dictionary lookup-table entries from a vendored file; no hand-written code is changed in this hunk]
dictWord{4, 0, 1022}, - dictWord{10, 0, 1000}, - dictWord{12, 0, 957}, - dictWord{12, 0, 980}, - dictWord{ - 12, - 0, - 1013, - }, - dictWord{14, 0, 481}, - dictWord{144, 0, 116}, - dictWord{8, 0, 503}, - dictWord{17, 0, 29}, - dictWord{4, 11, 49}, - dictWord{7, 11, 280}, - dictWord{ - 135, - 11, - 1633, - }, - dictWord{135, 0, 1712}, - dictWord{134, 0, 466}, - dictWord{136, 11, 47}, - dictWord{5, 10, 164}, - dictWord{7, 10, 121}, - dictWord{142, 10, 189}, - dictWord{ - 7, - 10, - 812, - }, - dictWord{7, 10, 1261}, - dictWord{7, 10, 1360}, - dictWord{9, 10, 632}, - dictWord{140, 10, 352}, - dictWord{139, 10, 556}, - dictWord{132, 0, 731}, - dictWord{5, 11, 272}, - dictWord{5, 11, 908}, - dictWord{5, 11, 942}, - dictWord{7, 11, 1008}, - dictWord{7, 11, 1560}, - dictWord{8, 11, 197}, - dictWord{9, 11, 47}, - dictWord{11, 11, 538}, - dictWord{139, 11, 742}, - dictWord{4, 10, 172}, - dictWord{9, 10, 611}, - dictWord{10, 10, 436}, - dictWord{12, 10, 673}, - dictWord{ - 141, - 10, - 255, - }, - dictWord{133, 10, 844}, - dictWord{10, 0, 484}, - dictWord{11, 0, 754}, - dictWord{12, 0, 457}, - dictWord{14, 0, 171}, - dictWord{14, 0, 389}, - dictWord{ - 146, - 0, - 153, - }, - dictWord{9, 10, 263}, - dictWord{10, 10, 147}, - dictWord{138, 10, 492}, - dictWord{137, 11, 891}, - dictWord{138, 0, 241}, - dictWord{133, 10, 537}, - dictWord{6, 0, 2005}, - dictWord{136, 0, 964}, - dictWord{137, 10, 842}, - dictWord{151, 11, 8}, - dictWord{4, 11, 407}, - dictWord{132, 11, 560}, - dictWord{ - 135, - 11, - 1884, - }, - dictWord{6, 0, 1100}, - dictWord{134, 0, 1242}, - dictWord{135, 0, 954}, - dictWord{5, 10, 230}, - dictWord{5, 10, 392}, - dictWord{6, 10, 420}, - dictWord{ - 9, - 10, - 568, - }, - dictWord{140, 10, 612}, - dictWord{4, 11, 475}, - dictWord{11, 11, 35}, - dictWord{11, 11, 90}, - dictWord{13, 11, 7}, - dictWord{13, 11, 71}, - dictWord{ - 13, - 11, - 177, - }, - dictWord{142, 11, 422}, - dictWord{136, 11, 332}, - dictWord{135, 0, 1958}, - dictWord{6, 0, 549}, - dictWord{8, 0, 34}, - dictWord{8, 0, 283}, - dictWord{ - 9, - 0, - 165, - }, - dictWord{138, 0, 475}, - dictWord{10, 0, 952}, - dictWord{12, 0, 966}, - dictWord{140, 0, 994}, - dictWord{5, 0, 652}, - dictWord{5, 0, 701}, - dictWord{ - 135, - 0, - 449, - }, - dictWord{4, 0, 655}, - dictWord{7, 0, 850}, - dictWord{17, 0, 75}, - dictWord{146, 0, 137}, - dictWord{4, 0, 146}, - dictWord{7, 0, 1618}, - dictWord{8, 0, 670}, - dictWord{ - 5, - 10, - 41, - }, - dictWord{7, 10, 1459}, - dictWord{7, 10, 1469}, - dictWord{7, 10, 1859}, - dictWord{9, 10, 549}, - dictWord{139, 10, 905}, - dictWord{133, 10, 696}, - dictWord{6, 0, 159}, - dictWord{6, 0, 364}, - dictWord{7, 0, 516}, - dictWord{137, 0, 518}, - dictWord{135, 0, 1439}, - dictWord{6, 11, 222}, - dictWord{7, 11, 636}, - dictWord{ - 7, - 11, - 1620, - }, - dictWord{8, 11, 409}, - dictWord{9, 11, 693}, - dictWord{139, 11, 77}, - dictWord{13, 0, 151}, - dictWord{141, 11, 45}, - dictWord{6, 0, 1027}, - dictWord{ - 4, - 11, - 336, - }, - dictWord{132, 10, 771}, - dictWord{139, 11, 392}, - dictWord{10, 11, 121}, - dictWord{11, 11, 175}, - dictWord{149, 11, 16}, - dictWord{8, 0, 950}, - dictWord{138, 0, 983}, - dictWord{133, 10, 921}, - dictWord{135, 0, 993}, - dictWord{6, 10, 180}, - dictWord{7, 10, 1137}, - dictWord{8, 10, 751}, - dictWord{ - 139, - 10, - 805, - }, - dictWord{7, 0, 501}, - dictWord{9, 0, 111}, - dictWord{10, 0, 141}, - dictWord{11, 0, 332}, - dictWord{13, 0, 43}, - dictWord{13, 0, 429}, - dictWord{14, 0, 130}, - dictWord{14, 0, 415}, - dictWord{145, 0, 102}, - dictWord{4, 10, 183}, 
- dictWord{5, 11, 882}, - dictWord{7, 10, 271}, - dictWord{11, 10, 824}, - dictWord{11, 10, 952}, - dictWord{13, 10, 278}, - dictWord{13, 10, 339}, - dictWord{13, 10, 482}, - dictWord{14, 10, 424}, - dictWord{148, 10, 99}, - dictWord{4, 10, 19}, - dictWord{5, 10, 477}, - dictWord{5, 10, 596}, - dictWord{6, 10, 505}, - dictWord{7, 10, 1221}, - dictWord{11, 10, 907}, - dictWord{12, 10, 209}, - dictWord{141, 10, 214}, - dictWord{ - 135, - 10, - 1215, - }, - dictWord{133, 0, 452}, - dictWord{132, 11, 426}, - dictWord{5, 0, 149}, - dictWord{136, 0, 233}, - dictWord{133, 0, 935}, - dictWord{6, 11, 58}, - dictWord{ - 7, - 11, - 654, - }, - dictWord{7, 11, 745}, - dictWord{7, 11, 1969}, - dictWord{8, 11, 240}, - dictWord{8, 11, 675}, - dictWord{9, 11, 479}, - dictWord{9, 11, 731}, - dictWord{ - 10, - 11, - 330, - }, - dictWord{10, 11, 593}, - dictWord{10, 11, 817}, - dictWord{11, 11, 32}, - dictWord{11, 11, 133}, - dictWord{11, 11, 221}, - dictWord{145, 11, 68}, - dictWord{ - 12, - 0, - 582, - }, - dictWord{18, 0, 131}, - dictWord{7, 11, 102}, - dictWord{137, 11, 538}, - dictWord{136, 0, 801}, - dictWord{134, 10, 1645}, - dictWord{132, 0, 70}, - dictWord{6, 10, 92}, - dictWord{6, 10, 188}, - dictWord{7, 10, 1269}, - dictWord{7, 10, 1524}, - dictWord{7, 10, 1876}, - dictWord{10, 10, 228}, - dictWord{139, 10, 1020}, - dictWord{4, 10, 459}, - dictWord{133, 10, 966}, - dictWord{138, 0, 369}, - dictWord{16, 0, 36}, - dictWord{140, 10, 330}, - dictWord{141, 11, 366}, - dictWord{ - 7, - 0, - 721, - }, - dictWord{10, 0, 236}, - dictWord{12, 0, 204}, - dictWord{6, 10, 18}, - dictWord{7, 10, 932}, - dictWord{8, 10, 757}, - dictWord{9, 10, 54}, - dictWord{9, 10, 65}, - dictWord{9, 10, 844}, - dictWord{10, 10, 113}, - dictWord{10, 10, 315}, - dictWord{10, 10, 798}, - dictWord{11, 10, 153}, - dictWord{12, 10, 151}, - dictWord{12, 10, 392}, - dictWord{12, 10, 666}, - dictWord{142, 10, 248}, - dictWord{7, 0, 241}, - dictWord{10, 0, 430}, - dictWord{8, 10, 548}, - dictWord{9, 10, 532}, - dictWord{10, 10, 117}, - dictWord{11, 10, 351}, - dictWord{11, 10, 375}, - dictWord{143, 10, 23}, - dictWord{134, 10, 1742}, - dictWord{133, 10, 965}, - dictWord{133, 11, 566}, - dictWord{ - 6, - 11, - 48, - }, - dictWord{135, 11, 63}, - dictWord{134, 10, 182}, - dictWord{10, 10, 65}, - dictWord{10, 10, 488}, - dictWord{138, 10, 497}, - dictWord{6, 11, 114}, - dictWord{7, 11, 1224}, - dictWord{7, 11, 1556}, - dictWord{136, 11, 3}, - dictWord{134, 0, 1817}, - dictWord{8, 11, 576}, - dictWord{137, 11, 267}, - dictWord{ - 6, - 0, - 1078, - }, - dictWord{144, 0, 16}, - dictWord{9, 10, 588}, - dictWord{138, 10, 260}, - dictWord{138, 0, 1021}, - dictWord{5, 0, 406}, - dictWord{134, 0, 2022}, - dictWord{133, 11, 933}, - dictWord{6, 0, 69}, - dictWord{135, 0, 117}, - dictWord{7, 0, 1830}, - dictWord{136, 11, 427}, - dictWord{4, 0, 432}, - dictWord{135, 0, 824}, - dictWord{134, 10, 1786}, - dictWord{133, 0, 826}, - dictWord{139, 11, 67}, - dictWord{133, 11, 759}, - dictWord{135, 10, 308}, - dictWord{137, 0, 816}, - dictWord{ - 133, - 0, - 1000, - }, - dictWord{4, 0, 297}, - dictWord{6, 0, 529}, - dictWord{7, 0, 152}, - dictWord{7, 0, 713}, - dictWord{7, 0, 1845}, - dictWord{8, 0, 710}, - dictWord{8, 0, 717}, - dictWord{12, 0, 639}, - dictWord{140, 0, 685}, - dictWord{7, 0, 423}, - dictWord{136, 10, 588}, - dictWord{136, 10, 287}, - dictWord{136, 0, 510}, - dictWord{ - 134, - 0, - 1048, - }, - dictWord{6, 0, 618}, - dictWord{7, 11, 56}, - dictWord{7, 11, 1989}, - dictWord{8, 11, 337}, - dictWord{8, 11, 738}, - dictWord{9, 11, 600}, - 
dictWord{ - 10, - 11, - 483, - }, - dictWord{12, 11, 37}, - dictWord{13, 11, 447}, - dictWord{142, 11, 92}, - dictWord{4, 0, 520}, - dictWord{135, 0, 575}, - dictWord{8, 0, 990}, - dictWord{ - 138, - 0, - 977, - }, - dictWord{135, 11, 774}, - dictWord{9, 11, 347}, - dictWord{11, 11, 24}, - dictWord{140, 11, 170}, - dictWord{136, 11, 379}, - dictWord{140, 10, 290}, - dictWord{132, 11, 328}, - dictWord{4, 0, 321}, - dictWord{134, 0, 569}, - dictWord{4, 11, 101}, - dictWord{135, 11, 1171}, - dictWord{7, 0, 723}, - dictWord{7, 0, 1135}, - dictWord{5, 11, 833}, - dictWord{136, 11, 744}, - dictWord{7, 10, 719}, - dictWord{8, 10, 809}, - dictWord{136, 10, 834}, - dictWord{8, 0, 921}, - dictWord{136, 10, 796}, - dictWord{5, 10, 210}, - dictWord{6, 10, 213}, - dictWord{7, 10, 60}, - dictWord{10, 10, 364}, - dictWord{139, 10, 135}, - dictWord{5, 0, 397}, - dictWord{6, 0, 154}, - dictWord{7, 0, 676}, - dictWord{8, 0, 443}, - dictWord{8, 0, 609}, - dictWord{9, 0, 24}, - dictWord{9, 0, 325}, - dictWord{10, 0, 35}, - dictWord{11, 0, 535}, - dictWord{11, 0, 672}, - dictWord{11, 0, 1018}, - dictWord{12, 0, 637}, - dictWord{16, 0, 30}, - dictWord{5, 10, 607}, - dictWord{8, 10, 326}, - dictWord{136, 10, 490}, - dictWord{4, 10, 701}, - dictWord{5, 10, 472}, - dictWord{6, 11, 9}, - dictWord{6, 11, 397}, - dictWord{7, 11, 53}, - dictWord{7, 11, 1742}, - dictWord{9, 10, 758}, - dictWord{10, 11, 632}, - dictWord{ - 11, - 11, - 828, - }, - dictWord{140, 11, 146}, - dictWord{135, 10, 380}, - dictWord{135, 10, 1947}, - dictWord{148, 11, 109}, - dictWord{10, 10, 278}, - dictWord{ - 138, - 11, - 278, - }, - dictWord{134, 0, 856}, - dictWord{7, 0, 139}, - dictWord{4, 10, 386}, - dictWord{8, 10, 405}, - dictWord{8, 10, 728}, - dictWord{9, 10, 497}, - dictWord{ - 11, - 10, - 110, - }, - dictWord{11, 10, 360}, - dictWord{15, 10, 37}, - dictWord{144, 10, 84}, - dictWord{141, 0, 282}, - dictWord{133, 0, 981}, - dictWord{5, 0, 288}, - dictWord{ - 7, - 10, - 1452, - }, - dictWord{7, 10, 1480}, - dictWord{8, 10, 634}, - dictWord{140, 10, 472}, - dictWord{7, 0, 1890}, - dictWord{8, 11, 367}, - dictWord{10, 11, 760}, - dictWord{ - 14, - 11, - 79, - }, - dictWord{20, 11, 17}, - dictWord{152, 11, 0}, - dictWord{4, 10, 524}, - dictWord{136, 10, 810}, - dictWord{4, 0, 56}, - dictWord{7, 0, 1791}, - dictWord{ - 8, - 0, - 607, - }, - dictWord{8, 0, 651}, - dictWord{11, 0, 465}, - dictWord{11, 0, 835}, - dictWord{12, 0, 337}, - dictWord{141, 0, 480}, - dictWord{10, 10, 238}, - dictWord{ - 141, - 10, - 33, - }, - dictWord{11, 11, 417}, - dictWord{12, 11, 223}, - dictWord{140, 11, 265}, - dictWord{9, 0, 158}, - dictWord{10, 0, 411}, - dictWord{140, 0, 261}, - dictWord{ - 133, - 10, - 532, - }, - dictWord{133, 10, 997}, - dictWord{12, 11, 186}, - dictWord{12, 11, 292}, - dictWord{14, 11, 100}, - dictWord{146, 11, 70}, - dictWord{6, 0, 1403}, - dictWord{136, 0, 617}, - dictWord{134, 0, 1205}, - dictWord{139, 0, 563}, - dictWord{4, 0, 242}, - dictWord{134, 0, 333}, - dictWord{4, 11, 186}, - dictWord{5, 11, 157}, - dictWord{8, 11, 168}, - dictWord{138, 11, 6}, - dictWord{132, 0, 369}, - dictWord{133, 11, 875}, - dictWord{5, 10, 782}, - dictWord{5, 10, 829}, - dictWord{ - 134, - 10, - 1738, - }, - dictWord{134, 0, 622}, - dictWord{135, 11, 1272}, - dictWord{6, 0, 1407}, - dictWord{7, 11, 111}, - dictWord{136, 11, 581}, - dictWord{7, 10, 1823}, - dictWord{139, 10, 693}, - dictWord{7, 0, 160}, - dictWord{10, 0, 624}, - dictWord{142, 0, 279}, - dictWord{132, 0, 363}, - dictWord{10, 11, 589}, - dictWord{12, 11, 111}, - dictWord{13, 11, 260}, - 
dictWord{14, 11, 82}, - dictWord{18, 11, 63}, - dictWord{147, 11, 45}, - dictWord{7, 11, 1364}, - dictWord{7, 11, 1907}, - dictWord{ - 141, - 11, - 158, - }, - dictWord{4, 11, 404}, - dictWord{4, 11, 659}, - dictWord{135, 11, 675}, - dictWord{13, 11, 211}, - dictWord{14, 11, 133}, - dictWord{14, 11, 204}, - dictWord{ - 15, - 11, - 64, - }, - dictWord{15, 11, 69}, - dictWord{15, 11, 114}, - dictWord{16, 11, 10}, - dictWord{19, 11, 23}, - dictWord{19, 11, 35}, - dictWord{19, 11, 39}, - dictWord{ - 19, - 11, - 51, - }, - dictWord{19, 11, 71}, - dictWord{19, 11, 75}, - dictWord{152, 11, 15}, - dictWord{4, 10, 78}, - dictWord{5, 10, 96}, - dictWord{5, 10, 182}, - dictWord{7, 10, 1724}, - dictWord{7, 10, 1825}, - dictWord{10, 10, 394}, - dictWord{10, 10, 471}, - dictWord{11, 10, 532}, - dictWord{14, 10, 340}, - dictWord{145, 10, 88}, - dictWord{ - 135, - 10, - 1964, - }, - dictWord{133, 11, 391}, - dictWord{11, 11, 887}, - dictWord{14, 11, 365}, - dictWord{142, 11, 375}, - dictWord{5, 11, 540}, - dictWord{6, 11, 1697}, - dictWord{7, 11, 222}, - dictWord{136, 11, 341}, - dictWord{134, 11, 78}, - dictWord{9, 0, 601}, - dictWord{9, 0, 619}, - dictWord{10, 0, 505}, - dictWord{10, 0, 732}, - dictWord{11, 0, 355}, - dictWord{140, 0, 139}, - dictWord{134, 0, 292}, - dictWord{139, 0, 174}, - dictWord{5, 0, 177}, - dictWord{6, 0, 616}, - dictWord{7, 0, 827}, - dictWord{ - 9, - 0, - 525, - }, - dictWord{138, 0, 656}, - dictWord{10, 0, 31}, - dictWord{6, 10, 215}, - dictWord{7, 10, 1028}, - dictWord{7, 10, 1473}, - dictWord{7, 10, 1721}, - dictWord{ - 9, - 10, - 424, - }, - dictWord{138, 10, 779}, - dictWord{135, 10, 584}, - dictWord{136, 11, 293}, - dictWord{134, 0, 685}, - dictWord{135, 11, 1868}, - dictWord{ - 133, - 11, - 460, - }, - dictWord{7, 0, 647}, - dictWord{6, 10, 67}, - dictWord{7, 10, 1630}, - dictWord{9, 10, 354}, - dictWord{9, 10, 675}, - dictWord{10, 10, 830}, - dictWord{ - 14, - 10, - 80, - }, - dictWord{145, 10, 80}, - dictWord{4, 0, 161}, - dictWord{133, 0, 631}, - dictWord{6, 10, 141}, - dictWord{7, 10, 225}, - dictWord{9, 10, 59}, - dictWord{9, 10, 607}, - dictWord{10, 10, 312}, - dictWord{11, 10, 687}, - dictWord{12, 10, 555}, - dictWord{13, 10, 373}, - dictWord{13, 10, 494}, - dictWord{148, 10, 58}, - dictWord{ - 7, - 11, - 965, - }, - dictWord{7, 11, 1460}, - dictWord{135, 11, 1604}, - dictWord{136, 10, 783}, - dictWord{134, 11, 388}, - dictWord{6, 0, 722}, - dictWord{6, 0, 1267}, - dictWord{ - 4, - 11, - 511, - }, - dictWord{9, 11, 333}, - dictWord{9, 11, 379}, - dictWord{10, 11, 602}, - dictWord{11, 11, 441}, - dictWord{11, 11, 723}, - dictWord{11, 11, 976}, - dictWord{140, 11, 357}, - dictWord{134, 0, 1797}, - dictWord{135, 0, 1684}, - dictWord{9, 0, 469}, - dictWord{9, 0, 709}, - dictWord{12, 0, 512}, - dictWord{14, 0, 65}, - dictWord{17, 0, 12}, - dictWord{5, 11, 938}, - dictWord{136, 11, 707}, - dictWord{7, 0, 1230}, - dictWord{136, 0, 531}, - dictWord{10, 0, 229}, - dictWord{11, 0, 73}, - dictWord{ - 11, - 0, - 376, - }, - dictWord{139, 0, 433}, - dictWord{12, 0, 268}, - dictWord{12, 0, 640}, - dictWord{142, 0, 119}, - dictWord{7, 10, 430}, - dictWord{139, 10, 46}, - dictWord{ - 6, - 0, - 558, - }, - dictWord{7, 0, 651}, - dictWord{8, 0, 421}, - dictWord{9, 0, 0}, - dictWord{10, 0, 34}, - dictWord{139, 0, 1008}, - dictWord{6, 0, 106}, - dictWord{7, 0, 1786}, - dictWord{7, 0, 1821}, - dictWord{9, 0, 102}, - dictWord{9, 0, 763}, - dictWord{5, 10, 602}, - dictWord{7, 10, 2018}, - dictWord{137, 10, 418}, - dictWord{5, 0, 65}, - dictWord{ - 6, - 0, - 416, - }, - dictWord{7, 0, 
1720}, - dictWord{7, 0, 1924}, - dictWord{10, 0, 109}, - dictWord{11, 0, 14}, - dictWord{11, 0, 70}, - dictWord{11, 0, 569}, - dictWord{11, 0, 735}, - dictWord{15, 0, 153}, - dictWord{20, 0, 80}, - dictWord{136, 10, 677}, - dictWord{135, 11, 1625}, - dictWord{137, 11, 772}, - dictWord{136, 0, 595}, - dictWord{ - 6, - 11, - 469, - }, - dictWord{7, 11, 1709}, - dictWord{138, 11, 515}, - dictWord{7, 0, 1832}, - dictWord{138, 0, 374}, - dictWord{9, 0, 106}, - dictWord{9, 0, 163}, - dictWord{ - 9, - 0, - 296, - }, - dictWord{10, 0, 167}, - dictWord{10, 0, 172}, - dictWord{10, 0, 777}, - dictWord{139, 0, 16}, - dictWord{6, 0, 6}, - dictWord{7, 0, 81}, - dictWord{7, 0, 771}, - dictWord{ - 7, - 0, - 1731, - }, - dictWord{9, 0, 405}, - dictWord{138, 0, 421}, - dictWord{4, 11, 500}, - dictWord{135, 11, 938}, - dictWord{5, 11, 68}, - dictWord{134, 11, 383}, - dictWord{ - 5, - 0, - 881, - }, - dictWord{133, 0, 885}, - dictWord{6, 0, 854}, - dictWord{6, 0, 1132}, - dictWord{6, 0, 1495}, - dictWord{6, 0, 1526}, - dictWord{6, 0, 1533}, - dictWord{ - 134, - 0, - 1577, - }, - dictWord{4, 11, 337}, - dictWord{6, 11, 353}, - dictWord{7, 11, 1934}, - dictWord{8, 11, 488}, - dictWord{137, 11, 429}, - dictWord{7, 11, 236}, - dictWord{ - 7, - 11, - 1795, - }, - dictWord{8, 11, 259}, - dictWord{9, 11, 135}, - dictWord{9, 11, 177}, - dictWord{10, 11, 825}, - dictWord{11, 11, 115}, - dictWord{11, 11, 370}, - dictWord{ - 11, - 11, - 405, - }, - dictWord{11, 11, 604}, - dictWord{12, 11, 10}, - dictWord{12, 11, 667}, - dictWord{12, 11, 669}, - dictWord{13, 11, 76}, - dictWord{14, 11, 310}, - dictWord{15, 11, 76}, - dictWord{15, 11, 147}, - dictWord{148, 11, 23}, - dictWord{5, 0, 142}, - dictWord{134, 0, 546}, - dictWord{4, 11, 15}, - dictWord{5, 11, 22}, - dictWord{ - 6, - 11, - 244, - }, - dictWord{7, 11, 40}, - dictWord{7, 11, 200}, - dictWord{7, 11, 906}, - dictWord{7, 11, 1199}, - dictWord{9, 11, 616}, - dictWord{10, 11, 716}, - dictWord{ - 11, - 11, - 635, - }, - dictWord{11, 11, 801}, - dictWord{140, 11, 458}, - dictWord{5, 0, 466}, - dictWord{11, 0, 571}, - dictWord{12, 0, 198}, - dictWord{13, 0, 283}, - dictWord{ - 14, - 0, - 186, - }, - dictWord{15, 0, 21}, - dictWord{15, 0, 103}, - dictWord{135, 10, 329}, - dictWord{4, 0, 185}, - dictWord{5, 0, 257}, - dictWord{5, 0, 839}, - dictWord{5, 0, 936}, - dictWord{9, 0, 399}, - dictWord{10, 0, 258}, - dictWord{10, 0, 395}, - dictWord{10, 0, 734}, - dictWord{11, 0, 1014}, - dictWord{12, 0, 23}, - dictWord{13, 0, 350}, - dictWord{ - 14, - 0, - 150, - }, - dictWord{19, 0, 6}, - dictWord{135, 11, 1735}, - dictWord{12, 11, 36}, - dictWord{141, 11, 337}, - dictWord{5, 11, 598}, - dictWord{7, 11, 791}, - dictWord{ - 8, - 11, - 108, - }, - dictWord{137, 11, 123}, - dictWord{132, 10, 469}, - dictWord{7, 0, 404}, - dictWord{7, 0, 1377}, - dictWord{7, 0, 1430}, - dictWord{7, 0, 2017}, - dictWord{ - 8, - 0, - 149, - }, - dictWord{8, 0, 239}, - dictWord{8, 0, 512}, - dictWord{8, 0, 793}, - dictWord{8, 0, 818}, - dictWord{9, 0, 474}, - dictWord{9, 0, 595}, - dictWord{10, 0, 122}, - dictWord{10, 0, 565}, - dictWord{10, 0, 649}, - dictWord{10, 0, 783}, - dictWord{11, 0, 239}, - dictWord{11, 0, 295}, - dictWord{11, 0, 447}, - dictWord{11, 0, 528}, - dictWord{ - 11, - 0, - 639, - }, - dictWord{11, 0, 800}, - dictWord{12, 0, 25}, - dictWord{12, 0, 77}, - dictWord{12, 0, 157}, - dictWord{12, 0, 256}, - dictWord{12, 0, 316}, - dictWord{12, 0, 390}, - dictWord{12, 0, 391}, - dictWord{12, 0, 395}, - dictWord{12, 0, 478}, - dictWord{12, 0, 503}, - dictWord{12, 0, 592}, - dictWord{12, 0, 
680}, - dictWord{13, 0, 50}, - dictWord{13, 0, 53}, - dictWord{13, 0, 132}, - dictWord{13, 0, 198}, - dictWord{13, 0, 322}, - dictWord{13, 0, 415}, - dictWord{13, 0, 511}, - dictWord{14, 0, 71}, - dictWord{ - 14, - 0, - 395, - }, - dictWord{15, 0, 71}, - dictWord{15, 0, 136}, - dictWord{17, 0, 123}, - dictWord{18, 0, 93}, - dictWord{147, 0, 58}, - dictWord{136, 0, 712}, - dictWord{ - 134, - 10, - 1743, - }, - dictWord{5, 10, 929}, - dictWord{6, 10, 340}, - dictWord{8, 10, 376}, - dictWord{136, 10, 807}, - dictWord{6, 0, 1848}, - dictWord{8, 0, 860}, - dictWord{ - 10, - 0, - 856, - }, - dictWord{10, 0, 859}, - dictWord{10, 0, 925}, - dictWord{10, 0, 941}, - dictWord{140, 0, 762}, - dictWord{6, 0, 629}, - dictWord{6, 0, 906}, - dictWord{9, 0, 810}, - dictWord{140, 0, 652}, - dictWord{5, 10, 218}, - dictWord{7, 10, 1610}, - dictWord{138, 10, 83}, - dictWord{7, 10, 1512}, - dictWord{135, 10, 1794}, - dictWord{ - 4, - 0, - 377, - }, - dictWord{24, 0, 13}, - dictWord{4, 11, 155}, - dictWord{7, 11, 1689}, - dictWord{11, 10, 0}, - dictWord{144, 10, 78}, - dictWord{4, 11, 164}, - dictWord{5, 11, 151}, - dictWord{5, 11, 730}, - dictWord{5, 11, 741}, - dictWord{7, 11, 498}, - dictWord{7, 11, 870}, - dictWord{7, 11, 1542}, - dictWord{12, 11, 213}, - dictWord{14, 11, 36}, - dictWord{14, 11, 391}, - dictWord{17, 11, 111}, - dictWord{18, 11, 6}, - dictWord{18, 11, 46}, - dictWord{18, 11, 151}, - dictWord{19, 11, 36}, - dictWord{20, 11, 32}, - dictWord{20, 11, 56}, - dictWord{20, 11, 69}, - dictWord{20, 11, 102}, - dictWord{21, 11, 4}, - dictWord{22, 11, 8}, - dictWord{22, 11, 10}, - dictWord{22, 11, 14}, - dictWord{ - 150, - 11, - 31, - }, - dictWord{7, 0, 1842}, - dictWord{133, 10, 571}, - dictWord{4, 10, 455}, - dictWord{4, 11, 624}, - dictWord{135, 11, 1752}, - dictWord{134, 0, 1501}, - dictWord{4, 11, 492}, - dictWord{5, 11, 451}, - dictWord{6, 10, 161}, - dictWord{7, 10, 372}, - dictWord{137, 10, 597}, - dictWord{132, 10, 349}, - dictWord{4, 0, 180}, - dictWord{135, 0, 1906}, - dictWord{135, 11, 835}, - dictWord{141, 11, 70}, - dictWord{132, 0, 491}, - dictWord{137, 10, 751}, - dictWord{6, 10, 432}, - dictWord{ - 139, - 10, - 322, - }, - dictWord{4, 0, 171}, - dictWord{138, 0, 234}, - dictWord{6, 11, 113}, - dictWord{135, 11, 436}, - dictWord{4, 0, 586}, - dictWord{7, 0, 1186}, - dictWord{ - 138, - 0, - 631, - }, - dictWord{5, 10, 468}, - dictWord{10, 10, 325}, - dictWord{11, 10, 856}, - dictWord{12, 10, 345}, - dictWord{143, 10, 104}, - dictWord{5, 10, 223}, - dictWord{10, 11, 592}, - dictWord{10, 11, 753}, - dictWord{12, 11, 317}, - dictWord{12, 11, 355}, - dictWord{12, 11, 465}, - dictWord{12, 11, 469}, - dictWord{ - 12, - 11, - 560, - }, - dictWord{12, 11, 578}, - dictWord{141, 11, 243}, - dictWord{132, 10, 566}, - dictWord{135, 11, 520}, - dictWord{4, 10, 59}, - dictWord{135, 10, 1394}, - dictWord{6, 10, 436}, - dictWord{139, 10, 481}, - dictWord{9, 0, 931}, - dictWord{10, 0, 334}, - dictWord{20, 0, 71}, - dictWord{4, 10, 48}, - dictWord{5, 10, 271}, - dictWord{ - 7, - 10, - 953, - }, - dictWord{135, 11, 1878}, - dictWord{11, 0, 170}, - dictWord{5, 10, 610}, - dictWord{136, 10, 457}, - dictWord{133, 10, 755}, - dictWord{6, 0, 1587}, - dictWord{135, 10, 1217}, - dictWord{4, 10, 197}, - dictWord{149, 11, 26}, - dictWord{133, 11, 585}, - dictWord{137, 11, 521}, - dictWord{133, 0, 765}, - dictWord{ - 133, - 10, - 217, - }, - dictWord{139, 11, 586}, - dictWord{133, 0, 424}, - dictWord{9, 11, 752}, - dictWord{12, 11, 610}, - dictWord{13, 11, 431}, - dictWord{16, 11, 59}, - dictWord{146, 11, 109}, - 
dictWord{136, 0, 714}, - dictWord{7, 0, 685}, - dictWord{132, 11, 307}, - dictWord{9, 0, 420}, - dictWord{10, 0, 269}, - dictWord{10, 0, 285}, - dictWord{10, 0, 576}, - dictWord{11, 0, 397}, - dictWord{13, 0, 175}, - dictWord{145, 0, 90}, - dictWord{132, 0, 429}, - dictWord{133, 11, 964}, - dictWord{9, 11, 463}, - dictWord{138, 11, 595}, - dictWord{7, 0, 18}, - dictWord{7, 0, 699}, - dictWord{7, 0, 1966}, - dictWord{8, 0, 752}, - dictWord{9, 0, 273}, - dictWord{9, 0, 412}, - dictWord{ - 9, - 0, - 703, - }, - dictWord{10, 0, 71}, - dictWord{10, 0, 427}, - dictWord{138, 0, 508}, - dictWord{4, 10, 165}, - dictWord{7, 10, 1398}, - dictWord{135, 10, 1829}, - dictWord{ - 4, - 0, - 53, - }, - dictWord{5, 0, 186}, - dictWord{7, 0, 752}, - dictWord{7, 0, 828}, - dictWord{142, 0, 116}, - dictWord{8, 0, 575}, - dictWord{10, 0, 289}, - dictWord{139, 0, 319}, - dictWord{132, 0, 675}, - dictWord{134, 0, 1424}, - dictWord{4, 11, 75}, - dictWord{5, 11, 180}, - dictWord{6, 11, 500}, - dictWord{7, 11, 58}, - dictWord{7, 11, 710}, - dictWord{138, 11, 645}, - dictWord{133, 11, 649}, - dictWord{6, 11, 276}, - dictWord{7, 11, 282}, - dictWord{7, 11, 879}, - dictWord{7, 11, 924}, - dictWord{8, 11, 459}, - dictWord{9, 11, 599}, - dictWord{9, 11, 754}, - dictWord{11, 11, 574}, - dictWord{12, 11, 128}, - dictWord{12, 11, 494}, - dictWord{13, 11, 52}, - dictWord{13, 11, 301}, - dictWord{15, 11, 30}, - dictWord{143, 11, 132}, - dictWord{6, 0, 647}, - dictWord{134, 0, 1095}, - dictWord{5, 10, 9}, - dictWord{7, 10, 297}, - dictWord{7, 10, 966}, - dictWord{140, 10, 306}, - dictWord{132, 11, 200}, - dictWord{134, 0, 1334}, - dictWord{5, 10, 146}, - dictWord{6, 10, 411}, - dictWord{138, 10, 721}, - dictWord{ - 6, - 0, - 209, - }, - dictWord{6, 0, 1141}, - dictWord{6, 0, 1288}, - dictWord{8, 0, 468}, - dictWord{9, 0, 210}, - dictWord{11, 0, 36}, - dictWord{12, 0, 28}, - dictWord{12, 0, 630}, - dictWord{13, 0, 21}, - dictWord{13, 0, 349}, - dictWord{14, 0, 7}, - dictWord{145, 0, 13}, - dictWord{6, 10, 177}, - dictWord{135, 10, 467}, - dictWord{4, 0, 342}, - dictWord{ - 135, - 0, - 1179, - }, - dictWord{10, 11, 454}, - dictWord{140, 11, 324}, - dictWord{4, 0, 928}, - dictWord{133, 0, 910}, - dictWord{7, 0, 1838}, - dictWord{6, 11, 225}, - dictWord{ - 137, - 11, - 211, - }, - dictWord{16, 0, 101}, - dictWord{20, 0, 115}, - dictWord{20, 0, 118}, - dictWord{148, 0, 122}, - dictWord{4, 0, 496}, - dictWord{135, 0, 856}, - dictWord{ - 4, - 0, - 318, - }, - dictWord{11, 0, 654}, - dictWord{7, 11, 718}, - dictWord{139, 11, 102}, - dictWord{8, 11, 58}, - dictWord{9, 11, 724}, - dictWord{11, 11, 809}, - dictWord{ - 13, - 11, - 113, - }, - dictWord{145, 11, 72}, - dictWord{5, 10, 200}, - dictWord{6, 11, 345}, - dictWord{135, 11, 1247}, - dictWord{8, 11, 767}, - dictWord{8, 11, 803}, - dictWord{ - 9, - 11, - 301, - }, - dictWord{137, 11, 903}, - dictWord{7, 0, 915}, - dictWord{8, 0, 247}, - dictWord{19, 0, 0}, - dictWord{7, 11, 1949}, - dictWord{136, 11, 674}, - dictWord{ - 4, - 0, - 202, - }, - dictWord{5, 0, 382}, - dictWord{6, 0, 454}, - dictWord{7, 0, 936}, - dictWord{7, 0, 1803}, - dictWord{8, 0, 758}, - dictWord{9, 0, 375}, - dictWord{9, 0, 895}, - dictWord{ - 10, - 0, - 743, - }, - dictWord{10, 0, 792}, - dictWord{11, 0, 978}, - dictWord{11, 0, 1012}, - dictWord{142, 0, 109}, - dictWord{7, 0, 1150}, - dictWord{7, 0, 1425}, - dictWord{ - 7, - 0, - 1453, - }, - dictWord{140, 0, 513}, - dictWord{134, 11, 259}, - dictWord{138, 0, 791}, - dictWord{11, 0, 821}, - dictWord{12, 0, 110}, - dictWord{12, 0, 153}, - dictWord{ - 18, - 0, - 
41, - }, - dictWord{150, 0, 19}, - dictWord{134, 10, 481}, - dictWord{132, 0, 796}, - dictWord{6, 0, 445}, - dictWord{9, 0, 909}, - dictWord{136, 11, 254}, - dictWord{ - 10, - 0, - 776, - }, - dictWord{13, 0, 345}, - dictWord{142, 0, 425}, - dictWord{4, 10, 84}, - dictWord{7, 10, 1482}, - dictWord{10, 10, 76}, - dictWord{138, 10, 142}, - dictWord{ - 135, - 11, - 742, - }, - dictWord{6, 0, 578}, - dictWord{133, 10, 1015}, - dictWord{6, 0, 1387}, - dictWord{4, 10, 315}, - dictWord{5, 10, 507}, - dictWord{135, 10, 1370}, - dictWord{4, 0, 438}, - dictWord{133, 0, 555}, - dictWord{136, 0, 766}, - dictWord{133, 11, 248}, - dictWord{134, 10, 1722}, - dictWord{4, 11, 116}, - dictWord{5, 11, 95}, - dictWord{5, 11, 445}, - dictWord{7, 11, 1688}, - dictWord{8, 11, 29}, - dictWord{9, 11, 272}, - dictWord{11, 11, 509}, - dictWord{139, 11, 915}, - dictWord{135, 0, 541}, - dictWord{133, 11, 543}, - dictWord{8, 10, 222}, - dictWord{8, 10, 476}, - dictWord{9, 10, 238}, - dictWord{11, 10, 516}, - dictWord{11, 10, 575}, - dictWord{ - 15, - 10, - 109, - }, - dictWord{146, 10, 100}, - dictWord{6, 0, 880}, - dictWord{134, 0, 1191}, - dictWord{5, 11, 181}, - dictWord{136, 11, 41}, - dictWord{134, 0, 1506}, - dictWord{132, 11, 681}, - dictWord{7, 11, 25}, - dictWord{8, 11, 202}, - dictWord{138, 11, 536}, - dictWord{139, 0, 983}, - dictWord{137, 0, 768}, - dictWord{132, 0, 584}, - dictWord{9, 11, 423}, - dictWord{140, 11, 89}, - dictWord{8, 11, 113}, - dictWord{9, 11, 877}, - dictWord{10, 11, 554}, - dictWord{11, 11, 83}, - dictWord{12, 11, 136}, - dictWord{147, 11, 109}, - dictWord{7, 10, 706}, - dictWord{7, 10, 1058}, - dictWord{138, 10, 538}, - dictWord{133, 11, 976}, - dictWord{4, 11, 206}, - dictWord{ - 135, - 11, - 746, - }, - dictWord{136, 11, 526}, - dictWord{140, 0, 737}, - dictWord{11, 10, 92}, - dictWord{11, 10, 196}, - dictWord{11, 10, 409}, - dictWord{11, 10, 450}, - dictWord{11, 10, 666}, - dictWord{11, 10, 777}, - dictWord{12, 10, 262}, - dictWord{13, 10, 385}, - dictWord{13, 10, 393}, - dictWord{15, 10, 115}, - dictWord{ - 16, - 10, - 45, - }, - dictWord{145, 10, 82}, - dictWord{4, 0, 226}, - dictWord{4, 0, 326}, - dictWord{7, 0, 1770}, - dictWord{4, 11, 319}, - dictWord{5, 11, 699}, - dictWord{138, 11, 673}, - dictWord{6, 10, 40}, - dictWord{135, 10, 1781}, - dictWord{5, 0, 426}, - dictWord{8, 0, 30}, - dictWord{9, 0, 2}, - dictWord{11, 0, 549}, - dictWord{147, 0, 122}, - dictWord{ - 6, - 0, - 1161, - }, - dictWord{134, 0, 1329}, - dictWord{138, 10, 97}, - dictWord{6, 10, 423}, - dictWord{7, 10, 665}, - dictWord{135, 10, 1210}, - dictWord{7, 11, 13}, - dictWord{ - 8, - 11, - 226, - }, - dictWord{10, 11, 537}, - dictWord{11, 11, 570}, - dictWord{11, 11, 605}, - dictWord{11, 11, 799}, - dictWord{11, 11, 804}, - dictWord{12, 11, 85}, - dictWord{12, 11, 516}, - dictWord{12, 11, 623}, - dictWord{13, 11, 112}, - dictWord{13, 11, 361}, - dictWord{14, 11, 77}, - dictWord{14, 11, 78}, - dictWord{17, 11, 28}, - dictWord{147, 11, 110}, - dictWord{132, 11, 769}, - dictWord{132, 11, 551}, - dictWord{132, 11, 728}, - dictWord{147, 0, 117}, - dictWord{9, 11, 57}, - dictWord{ - 9, - 11, - 459, - }, - dictWord{10, 11, 425}, - dictWord{11, 11, 119}, - dictWord{12, 11, 184}, - dictWord{12, 11, 371}, - dictWord{13, 11, 358}, - dictWord{145, 11, 51}, - dictWord{ - 5, - 11, - 188, - }, - dictWord{5, 11, 814}, - dictWord{8, 11, 10}, - dictWord{9, 11, 421}, - dictWord{9, 11, 729}, - dictWord{10, 11, 609}, - dictWord{139, 11, 689}, - dictWord{134, 11, 624}, - dictWord{135, 11, 298}, - dictWord{135, 0, 462}, - dictWord{4, 
0, 345}, - dictWord{139, 10, 624}, - dictWord{136, 10, 574}, - dictWord{ - 4, - 0, - 385, - }, - dictWord{7, 0, 265}, - dictWord{135, 0, 587}, - dictWord{6, 0, 808}, - dictWord{132, 11, 528}, - dictWord{133, 0, 398}, - dictWord{132, 10, 354}, - dictWord{ - 4, - 0, - 347, - }, - dictWord{5, 0, 423}, - dictWord{5, 0, 996}, - dictWord{135, 0, 1329}, - dictWord{135, 10, 1558}, - dictWord{7, 0, 1259}, - dictWord{9, 0, 125}, - dictWord{ - 139, - 0, - 65, - }, - dictWord{5, 0, 136}, - dictWord{6, 0, 136}, - dictWord{136, 0, 644}, - dictWord{5, 11, 104}, - dictWord{6, 11, 173}, - dictWord{135, 11, 1631}, - dictWord{ - 135, - 0, - 469, - }, - dictWord{133, 10, 830}, - dictWord{4, 0, 278}, - dictWord{5, 0, 465}, - dictWord{135, 0, 1367}, - dictWord{7, 11, 810}, - dictWord{8, 11, 138}, - dictWord{ - 8, - 11, - 342, - }, - dictWord{9, 11, 84}, - dictWord{10, 11, 193}, - dictWord{11, 11, 883}, - dictWord{140, 11, 359}, - dictWord{5, 10, 496}, - dictWord{135, 10, 203}, - dictWord{ - 4, - 0, - 433, - }, - dictWord{133, 0, 719}, - dictWord{6, 11, 95}, - dictWord{134, 10, 547}, - dictWord{5, 10, 88}, - dictWord{137, 10, 239}, - dictWord{6, 11, 406}, - dictWord{ - 10, - 11, - 409, - }, - dictWord{10, 11, 447}, - dictWord{11, 11, 44}, - dictWord{140, 11, 100}, - dictWord{134, 0, 1423}, - dictWord{7, 10, 650}, - dictWord{135, 10, 1310}, - dictWord{134, 0, 749}, - dictWord{135, 11, 1243}, - dictWord{135, 0, 1363}, - dictWord{6, 0, 381}, - dictWord{7, 0, 645}, - dictWord{7, 0, 694}, - dictWord{8, 0, 546}, - dictWord{7, 10, 1076}, - dictWord{9, 10, 80}, - dictWord{11, 10, 78}, - dictWord{11, 10, 421}, - dictWord{11, 10, 534}, - dictWord{140, 10, 545}, - dictWord{ - 134, - 11, - 1636, - }, - dictWord{135, 11, 1344}, - dictWord{12, 0, 277}, - dictWord{7, 10, 274}, - dictWord{11, 10, 479}, - dictWord{139, 10, 507}, - dictWord{6, 0, 705}, - dictWord{ - 6, - 0, - 783, - }, - dictWord{6, 0, 1275}, - dictWord{6, 0, 1481}, - dictWord{4, 11, 282}, - dictWord{7, 11, 1034}, - dictWord{11, 11, 398}, - dictWord{11, 11, 634}, - dictWord{ - 12, - 11, - 1, - }, - dictWord{12, 11, 79}, - dictWord{12, 11, 544}, - dictWord{14, 11, 237}, - dictWord{17, 11, 10}, - dictWord{146, 11, 20}, - dictWord{134, 0, 453}, - dictWord{ - 4, - 0, - 555, - }, - dictWord{8, 0, 536}, - dictWord{10, 0, 288}, - dictWord{11, 0, 1005}, - dictWord{4, 10, 497}, - dictWord{135, 10, 1584}, - dictWord{5, 11, 118}, - dictWord{ - 5, - 11, - 499, - }, - dictWord{6, 11, 476}, - dictWord{7, 11, 600}, - dictWord{7, 11, 888}, - dictWord{135, 11, 1096}, - dictWord{138, 0, 987}, - dictWord{7, 0, 1107}, - dictWord{ - 7, - 10, - 261, - }, - dictWord{7, 10, 1115}, - dictWord{7, 10, 1354}, - dictWord{7, 10, 1588}, - dictWord{7, 10, 1705}, - dictWord{7, 10, 1902}, - dictWord{9, 10, 465}, - dictWord{10, 10, 248}, - dictWord{10, 10, 349}, - dictWord{10, 10, 647}, - dictWord{11, 10, 527}, - dictWord{11, 10, 660}, - dictWord{11, 10, 669}, - dictWord{ - 12, - 10, - 529, - }, - dictWord{141, 10, 305}, - dictWord{7, 11, 296}, - dictWord{7, 11, 596}, - dictWord{8, 11, 560}, - dictWord{8, 11, 586}, - dictWord{9, 11, 612}, - dictWord{ - 11, - 11, - 100, - }, - dictWord{11, 11, 304}, - dictWord{12, 11, 46}, - dictWord{13, 11, 89}, - dictWord{14, 11, 112}, - dictWord{145, 11, 122}, - dictWord{9, 0, 370}, - dictWord{ - 138, - 0, - 90, - }, - dictWord{136, 10, 13}, - dictWord{132, 0, 860}, - dictWord{7, 10, 642}, - dictWord{8, 10, 250}, - dictWord{11, 10, 123}, - dictWord{11, 10, 137}, - dictWord{ - 13, - 10, - 48, - }, - dictWord{142, 10, 95}, - dictWord{135, 10, 1429}, - 
dictWord{137, 11, 321}, - dictWord{132, 0, 257}, - dictWord{135, 0, 2031}, - dictWord{7, 0, 1768}, - dictWord{7, 11, 1599}, - dictWord{7, 11, 1723}, - dictWord{8, 11, 79}, - dictWord{8, 11, 106}, - dictWord{8, 11, 190}, - dictWord{8, 11, 302}, - dictWord{8, 11, 383}, - dictWord{9, 11, 119}, - dictWord{9, 11, 233}, - dictWord{9, 11, 298}, - dictWord{9, 11, 419}, - dictWord{9, 11, 471}, - dictWord{10, 11, 181}, - dictWord{10, 11, 406}, - dictWord{11, 11, 57}, - dictWord{11, 11, 85}, - dictWord{11, 11, 120}, - dictWord{11, 11, 177}, - dictWord{11, 11, 296}, - dictWord{11, 11, 382}, - dictWord{11, 11, 454}, - dictWord{11, 11, 758}, - dictWord{11, 11, 999}, - dictWord{12, 11, 27}, - dictWord{12, 11, 98}, - dictWord{12, 11, 131}, - dictWord{12, 11, 245}, - dictWord{ - 12, - 11, - 312, - }, - dictWord{12, 11, 446}, - dictWord{12, 11, 454}, - dictWord{13, 11, 25}, - dictWord{13, 11, 98}, - dictWord{13, 11, 426}, - dictWord{13, 11, 508}, - dictWord{ - 14, - 11, - 6, - }, - dictWord{14, 11, 163}, - dictWord{14, 11, 272}, - dictWord{14, 11, 277}, - dictWord{14, 11, 370}, - dictWord{15, 11, 95}, - dictWord{15, 11, 138}, - dictWord{ - 15, - 11, - 167, - }, - dictWord{17, 11, 18}, - dictWord{17, 11, 38}, - dictWord{20, 11, 96}, - dictWord{149, 11, 32}, - dictWord{5, 11, 722}, - dictWord{134, 11, 1759}, - dictWord{145, 11, 16}, - dictWord{6, 0, 1071}, - dictWord{134, 0, 1561}, - dictWord{10, 10, 545}, - dictWord{140, 10, 301}, - dictWord{6, 0, 83}, - dictWord{6, 0, 1733}, - dictWord{135, 0, 1389}, - dictWord{4, 0, 835}, - dictWord{135, 0, 1818}, - dictWord{133, 11, 258}, - dictWord{4, 10, 904}, - dictWord{133, 10, 794}, - dictWord{ - 134, - 0, - 2006, - }, - dictWord{5, 11, 30}, - dictWord{7, 11, 495}, - dictWord{8, 11, 134}, - dictWord{9, 11, 788}, - dictWord{140, 11, 438}, - dictWord{135, 11, 2004}, - dictWord{ - 137, - 0, - 696, - }, - dictWord{5, 11, 50}, - dictWord{6, 11, 439}, - dictWord{7, 11, 780}, - dictWord{135, 11, 1040}, - dictWord{7, 11, 772}, - dictWord{7, 11, 1104}, - dictWord{ - 7, - 11, - 1647, - }, - dictWord{11, 11, 269}, - dictWord{11, 11, 539}, - dictWord{11, 11, 607}, - dictWord{11, 11, 627}, - dictWord{11, 11, 706}, - dictWord{11, 11, 975}, - dictWord{12, 11, 248}, - dictWord{12, 11, 311}, - dictWord{12, 11, 434}, - dictWord{12, 11, 600}, - dictWord{12, 11, 622}, - dictWord{13, 11, 297}, - dictWord{ - 13, - 11, - 367, - }, - dictWord{13, 11, 485}, - dictWord{14, 11, 69}, - dictWord{14, 11, 409}, - dictWord{143, 11, 108}, - dictWord{5, 11, 1}, - dictWord{6, 11, 81}, - dictWord{ - 138, - 11, - 520, - }, - dictWord{7, 0, 1718}, - dictWord{9, 0, 95}, - dictWord{9, 0, 274}, - dictWord{10, 0, 279}, - dictWord{10, 0, 317}, - dictWord{10, 0, 420}, - dictWord{11, 0, 303}, - dictWord{11, 0, 808}, - dictWord{12, 0, 134}, - dictWord{12, 0, 367}, - dictWord{13, 0, 149}, - dictWord{13, 0, 347}, - dictWord{14, 0, 349}, - dictWord{14, 0, 406}, - dictWord{ - 18, - 0, - 22, - }, - dictWord{18, 0, 89}, - dictWord{18, 0, 122}, - dictWord{147, 0, 47}, - dictWord{5, 11, 482}, - dictWord{8, 11, 98}, - dictWord{9, 11, 172}, - dictWord{10, 11, 222}, - dictWord{10, 11, 700}, - dictWord{10, 11, 822}, - dictWord{11, 11, 302}, - dictWord{11, 11, 778}, - dictWord{12, 11, 50}, - dictWord{12, 11, 127}, - dictWord{ - 12, - 11, - 396, - }, - dictWord{13, 11, 62}, - dictWord{13, 11, 328}, - dictWord{14, 11, 122}, - dictWord{147, 11, 72}, - dictWord{7, 10, 386}, - dictWord{138, 10, 713}, - dictWord{ - 6, - 10, - 7, - }, - dictWord{6, 10, 35}, - dictWord{7, 10, 147}, - dictWord{7, 10, 1069}, - dictWord{7, 10, 1568}, - 
dictWord{7, 10, 1575}, - dictWord{7, 10, 1917}, - dictWord{ - 8, - 10, - 43, - }, - dictWord{8, 10, 208}, - dictWord{9, 10, 128}, - dictWord{9, 10, 866}, - dictWord{10, 10, 20}, - dictWord{11, 10, 981}, - dictWord{147, 10, 33}, - dictWord{ - 133, - 0, - 26, - }, - dictWord{132, 0, 550}, - dictWord{5, 11, 2}, - dictWord{7, 11, 1494}, - dictWord{136, 11, 589}, - dictWord{6, 11, 512}, - dictWord{7, 11, 797}, - dictWord{ - 8, - 11, - 253, - }, - dictWord{9, 11, 77}, - dictWord{10, 11, 1}, - dictWord{10, 11, 129}, - dictWord{10, 11, 225}, - dictWord{11, 11, 118}, - dictWord{11, 11, 226}, - dictWord{ - 11, - 11, - 251, - }, - dictWord{11, 11, 430}, - dictWord{11, 11, 701}, - dictWord{11, 11, 974}, - dictWord{11, 11, 982}, - dictWord{12, 11, 64}, - dictWord{12, 11, 260}, - dictWord{ - 12, - 11, - 488, - }, - dictWord{140, 11, 690}, - dictWord{7, 10, 893}, - dictWord{141, 10, 424}, - dictWord{134, 0, 901}, - dictWord{136, 0, 822}, - dictWord{4, 0, 902}, - dictWord{5, 0, 809}, - dictWord{134, 0, 122}, - dictWord{6, 0, 807}, - dictWord{134, 0, 1366}, - dictWord{7, 0, 262}, - dictWord{5, 11, 748}, - dictWord{134, 11, 553}, - dictWord{133, 0, 620}, - dictWord{4, 0, 34}, - dictWord{5, 0, 574}, - dictWord{7, 0, 279}, - dictWord{7, 0, 1624}, - dictWord{136, 0, 601}, - dictWord{9, 0, 170}, - dictWord{ - 6, - 10, - 322, - }, - dictWord{9, 10, 552}, - dictWord{11, 10, 274}, - dictWord{13, 10, 209}, - dictWord{13, 10, 499}, - dictWord{14, 10, 85}, - dictWord{15, 10, 126}, - dictWord{ - 145, - 10, - 70, - }, - dictWord{132, 0, 537}, - dictWord{4, 11, 12}, - dictWord{7, 11, 420}, - dictWord{7, 11, 522}, - dictWord{7, 11, 809}, - dictWord{8, 11, 797}, - dictWord{ - 141, - 11, - 88, - }, - dictWord{133, 0, 332}, - dictWord{8, 10, 83}, - dictWord{8, 10, 742}, - dictWord{8, 10, 817}, - dictWord{9, 10, 28}, - dictWord{9, 10, 29}, - dictWord{9, 10, 885}, - dictWord{10, 10, 387}, - dictWord{11, 10, 633}, - dictWord{11, 10, 740}, - dictWord{13, 10, 235}, - dictWord{13, 10, 254}, - dictWord{15, 10, 143}, - dictWord{ - 143, - 10, - 146, - }, - dictWord{6, 0, 1909}, - dictWord{9, 0, 964}, - dictWord{12, 0, 822}, - dictWord{12, 0, 854}, - dictWord{12, 0, 865}, - dictWord{12, 0, 910}, - dictWord{12, 0, 938}, - dictWord{15, 0, 169}, - dictWord{15, 0, 208}, - dictWord{15, 0, 211}, - dictWord{18, 0, 205}, - dictWord{18, 0, 206}, - dictWord{18, 0, 220}, - dictWord{18, 0, 223}, - dictWord{152, 0, 24}, - dictWord{140, 10, 49}, - dictWord{5, 11, 528}, - dictWord{135, 11, 1580}, - dictWord{6, 0, 261}, - dictWord{8, 0, 182}, - dictWord{139, 0, 943}, - dictWord{134, 0, 1721}, - dictWord{4, 0, 933}, - dictWord{133, 0, 880}, - dictWord{136, 11, 321}, - dictWord{5, 11, 266}, - dictWord{9, 11, 290}, - dictWord{9, 11, 364}, - dictWord{10, 11, 293}, - dictWord{11, 11, 606}, - dictWord{142, 11, 45}, - dictWord{6, 0, 1609}, - dictWord{4, 11, 50}, - dictWord{6, 11, 510}, - dictWord{6, 11, 594}, - dictWord{9, 11, 121}, - dictWord{10, 11, 49}, - dictWord{10, 11, 412}, - dictWord{139, 11, 834}, - dictWord{7, 0, 895}, - dictWord{136, 11, 748}, - dictWord{132, 11, 466}, - dictWord{4, 10, 110}, - dictWord{10, 10, 415}, - dictWord{10, 10, 597}, - dictWord{142, 10, 206}, - dictWord{133, 0, 812}, - dictWord{135, 11, 281}, - dictWord{ - 6, - 0, - 1890, - }, - dictWord{6, 0, 1902}, - dictWord{6, 0, 1916}, - dictWord{9, 0, 929}, - dictWord{9, 0, 942}, - dictWord{9, 0, 975}, - dictWord{9, 0, 984}, - dictWord{9, 0, 986}, - dictWord{ - 9, - 0, - 1011, - }, - dictWord{9, 0, 1019}, - dictWord{12, 0, 804}, - dictWord{12, 0, 851}, - dictWord{12, 0, 867}, - 
dictWord{12, 0, 916}, - dictWord{12, 0, 923}, - dictWord{ - 15, - 0, - 194, - }, - dictWord{15, 0, 204}, - dictWord{15, 0, 210}, - dictWord{15, 0, 222}, - dictWord{15, 0, 223}, - dictWord{15, 0, 229}, - dictWord{15, 0, 250}, - dictWord{ - 18, - 0, - 179, - }, - dictWord{18, 0, 186}, - dictWord{18, 0, 192}, - dictWord{7, 10, 205}, - dictWord{135, 10, 2000}, - dictWord{132, 11, 667}, - dictWord{135, 0, 778}, - dictWord{ - 4, - 0, - 137, - }, - dictWord{7, 0, 1178}, - dictWord{135, 0, 1520}, - dictWord{134, 0, 1314}, - dictWord{4, 11, 242}, - dictWord{134, 11, 333}, - dictWord{6, 0, 1661}, - dictWord{7, 0, 1975}, - dictWord{7, 0, 2009}, - dictWord{135, 0, 2011}, - dictWord{134, 0, 1591}, - dictWord{4, 10, 283}, - dictWord{135, 10, 1194}, - dictWord{ - 11, - 0, - 820, - }, - dictWord{150, 0, 51}, - dictWord{4, 11, 39}, - dictWord{5, 11, 36}, - dictWord{7, 11, 1843}, - dictWord{8, 11, 407}, - dictWord{11, 11, 144}, - dictWord{ - 140, - 11, - 523, - }, - dictWord{134, 10, 1720}, - dictWord{4, 11, 510}, - dictWord{7, 11, 29}, - dictWord{7, 11, 66}, - dictWord{7, 11, 1980}, - dictWord{10, 11, 487}, - dictWord{ - 10, - 11, - 809, - }, - dictWord{146, 11, 9}, - dictWord{5, 0, 89}, - dictWord{7, 0, 1915}, - dictWord{9, 0, 185}, - dictWord{9, 0, 235}, - dictWord{10, 0, 64}, - dictWord{10, 0, 270}, - dictWord{10, 0, 403}, - dictWord{10, 0, 469}, - dictWord{10, 0, 529}, - dictWord{10, 0, 590}, - dictWord{11, 0, 140}, - dictWord{11, 0, 860}, - dictWord{13, 0, 1}, - dictWord{ - 13, - 0, - 422, - }, - dictWord{14, 0, 341}, - dictWord{14, 0, 364}, - dictWord{17, 0, 93}, - dictWord{18, 0, 113}, - dictWord{19, 0, 97}, - dictWord{147, 0, 113}, - dictWord{133, 0, 695}, - dictWord{6, 0, 987}, - dictWord{134, 0, 1160}, - dictWord{5, 0, 6}, - dictWord{6, 0, 183}, - dictWord{7, 0, 680}, - dictWord{7, 0, 978}, - dictWord{7, 0, 1013}, - dictWord{ - 7, - 0, - 1055, - }, - dictWord{12, 0, 230}, - dictWord{13, 0, 172}, - dictWord{146, 0, 29}, - dictWord{134, 11, 570}, - dictWord{132, 11, 787}, - dictWord{134, 11, 518}, - dictWord{ - 6, - 0, - 29, - }, - dictWord{139, 0, 63}, - dictWord{132, 11, 516}, - dictWord{136, 11, 821}, - dictWord{132, 0, 311}, - dictWord{134, 0, 1740}, - dictWord{7, 0, 170}, - dictWord{8, 0, 90}, - dictWord{8, 0, 177}, - dictWord{8, 0, 415}, - dictWord{11, 0, 714}, - dictWord{14, 0, 281}, - dictWord{136, 10, 735}, - dictWord{134, 0, 1961}, - dictWord{ - 135, - 11, - 1405, - }, - dictWord{4, 11, 10}, - dictWord{7, 11, 917}, - dictWord{139, 11, 786}, - dictWord{5, 10, 132}, - dictWord{9, 10, 486}, - dictWord{9, 10, 715}, - dictWord{ - 10, - 10, - 458, - }, - dictWord{11, 10, 373}, - dictWord{11, 10, 668}, - dictWord{11, 10, 795}, - dictWord{11, 10, 897}, - dictWord{12, 10, 272}, - dictWord{12, 10, 424}, - dictWord{12, 10, 539}, - dictWord{12, 10, 558}, - dictWord{14, 10, 245}, - dictWord{14, 10, 263}, - dictWord{14, 10, 264}, - dictWord{14, 10, 393}, - dictWord{ - 142, - 10, - 403, - }, - dictWord{11, 0, 91}, - dictWord{13, 0, 129}, - dictWord{15, 0, 101}, - dictWord{145, 0, 125}, - dictWord{135, 0, 1132}, - dictWord{4, 0, 494}, - dictWord{6, 0, 74}, - dictWord{7, 0, 44}, - dictWord{7, 0, 407}, - dictWord{12, 0, 17}, - dictWord{15, 0, 5}, - dictWord{148, 0, 11}, - dictWord{133, 10, 379}, - dictWord{5, 0, 270}, - dictWord{ - 5, - 11, - 684, - }, - dictWord{6, 10, 89}, - dictWord{6, 10, 400}, - dictWord{7, 10, 1569}, - dictWord{7, 10, 1623}, - dictWord{7, 10, 1850}, - dictWord{8, 10, 218}, - dictWord{ - 8, - 10, - 422, - }, - dictWord{9, 10, 570}, - dictWord{138, 10, 626}, - dictWord{4, 0, 276}, - 
dictWord{133, 0, 296}, - dictWord{6, 0, 1523}, - dictWord{134, 11, 27}, - dictWord{ - 6, - 10, - 387, - }, - dictWord{7, 10, 882}, - dictWord{141, 10, 111}, - dictWord{6, 10, 224}, - dictWord{7, 10, 877}, - dictWord{137, 10, 647}, - dictWord{135, 10, 790}, - dictWord{ - 4, - 0, - 7, - }, - dictWord{5, 0, 90}, - dictWord{5, 0, 158}, - dictWord{6, 0, 542}, - dictWord{7, 0, 221}, - dictWord{7, 0, 1574}, - dictWord{9, 0, 490}, - dictWord{10, 0, 540}, - dictWord{ - 11, - 0, - 443, - }, - dictWord{139, 0, 757}, - dictWord{7, 0, 588}, - dictWord{9, 0, 175}, - dictWord{138, 0, 530}, - dictWord{135, 10, 394}, - dictWord{142, 11, 23}, - dictWord{ - 134, - 0, - 786, - }, - dictWord{135, 0, 580}, - dictWord{7, 0, 88}, - dictWord{136, 0, 627}, - dictWord{5, 0, 872}, - dictWord{6, 0, 57}, - dictWord{7, 0, 471}, - dictWord{9, 0, 447}, - dictWord{137, 0, 454}, - dictWord{6, 11, 342}, - dictWord{6, 11, 496}, - dictWord{8, 11, 275}, - dictWord{137, 11, 206}, - dictWord{4, 11, 909}, - dictWord{133, 11, 940}, - dictWord{6, 0, 735}, - dictWord{132, 11, 891}, - dictWord{8, 0, 845}, - dictWord{8, 0, 916}, - dictWord{135, 10, 1409}, - dictWord{5, 0, 31}, - dictWord{134, 0, 614}, - dictWord{11, 0, 458}, - dictWord{12, 0, 15}, - dictWord{140, 0, 432}, - dictWord{8, 0, 330}, - dictWord{140, 0, 477}, - dictWord{4, 0, 530}, - dictWord{5, 0, 521}, - dictWord{ - 7, - 0, - 1200, - }, - dictWord{10, 0, 460}, - dictWord{132, 11, 687}, - dictWord{6, 0, 424}, - dictWord{135, 0, 1866}, - dictWord{9, 0, 569}, - dictWord{12, 0, 12}, - dictWord{ - 12, - 0, - 81, - }, - dictWord{12, 0, 319}, - dictWord{13, 0, 69}, - dictWord{14, 0, 259}, - dictWord{16, 0, 87}, - dictWord{17, 0, 1}, - dictWord{17, 0, 21}, - dictWord{17, 0, 24}, - dictWord{ - 18, - 0, - 15, - }, - dictWord{18, 0, 56}, - dictWord{18, 0, 59}, - dictWord{18, 0, 127}, - dictWord{18, 0, 154}, - dictWord{19, 0, 19}, - dictWord{148, 0, 31}, - dictWord{7, 0, 1302}, - dictWord{136, 10, 38}, - dictWord{134, 11, 253}, - dictWord{5, 10, 261}, - dictWord{7, 10, 78}, - dictWord{7, 10, 199}, - dictWord{8, 10, 815}, - dictWord{9, 10, 126}, - dictWord{138, 10, 342}, - dictWord{5, 0, 595}, - dictWord{135, 0, 1863}, - dictWord{6, 11, 41}, - dictWord{141, 11, 160}, - dictWord{5, 0, 13}, - dictWord{134, 0, 142}, - dictWord{6, 0, 97}, - dictWord{7, 0, 116}, - dictWord{8, 0, 322}, - dictWord{8, 0, 755}, - dictWord{9, 0, 548}, - dictWord{10, 0, 714}, - dictWord{11, 0, 884}, - dictWord{13, 0, 324}, - dictWord{7, 11, 1304}, - dictWord{138, 11, 477}, - dictWord{132, 10, 628}, - dictWord{134, 11, 1718}, - dictWord{7, 10, 266}, - dictWord{136, 10, 804}, - dictWord{135, 10, 208}, - dictWord{7, 0, 1021}, - dictWord{6, 10, 79}, - dictWord{135, 10, 1519}, - dictWord{7, 0, 1472}, - dictWord{135, 0, 1554}, - dictWord{6, 11, 362}, - dictWord{146, 11, 51}, - dictWord{7, 0, 1071}, - dictWord{7, 0, 1541}, - dictWord{7, 0, 1767}, - dictWord{7, 0, 1806}, - dictWord{11, 0, 162}, - dictWord{11, 0, 242}, - dictWord{11, 0, 452}, - dictWord{12, 0, 605}, - dictWord{15, 0, 26}, - dictWord{144, 0, 44}, - dictWord{136, 10, 741}, - dictWord{133, 11, 115}, - dictWord{145, 0, 115}, - dictWord{134, 10, 376}, - dictWord{6, 0, 1406}, - dictWord{134, 0, 1543}, - dictWord{5, 11, 193}, - dictWord{12, 11, 178}, - dictWord{13, 11, 130}, - dictWord{ - 145, - 11, - 84, - }, - dictWord{135, 0, 1111}, - dictWord{8, 0, 1}, - dictWord{9, 0, 650}, - dictWord{10, 0, 326}, - dictWord{5, 11, 705}, - dictWord{137, 11, 606}, - dictWord{5, 0, 488}, - dictWord{6, 0, 527}, - dictWord{7, 0, 489}, - dictWord{7, 0, 1636}, - dictWord{8, 0, 
[... remaining generated dictWord entries of the deleted vendored brotli static-dictionary table (github.com/andybalholm/brotli) omitted ...]
diff --git a/vendor/github.com/andybalholm/brotli/symbol_list.go b/vendor/github.com/andybalholm/brotli/symbol_list.go
deleted file mode 100644
index c5cb49e5a9d..00000000000
--- a/vendor/github.com/andybalholm/brotli/symbol_list.go
+++ /dev/null
@@ -1,22 +0,0 @@
[... 22 deleted lines omitted: symbolList helpers for building Huffman decoding tables ...]
diff --git a/vendor/github.com/andybalholm/brotli/transform.go b/vendor/github.com/andybalholm/brotli/transform.go
deleted file mode 100644
index d2c043a6227..00000000000
--- a/vendor/github.com/andybalholm/brotli/transform.go
+++ /dev/null
@@ -1,641 +0,0 @@
[... 641 deleted lines omitted: RFC 7932 word-transform tables and dictionary-word transform helpers ...]
diff --git a/vendor/github.com/andybalholm/brotli/utf8_util.go b/vendor/github.com/andybalholm/brotli/utf8_util.go
deleted file mode 100644
index f86de3d2091..00000000000
--- a/vendor/github.com/andybalholm/brotli/utf8_util.go
+++ /dev/null
@@ -1,71 +0,0 @@
[... 71 deleted lines omitted: heuristics for deciding whether input data is mostly UTF-8 ...]
diff --git a/vendor/github.com/andybalholm/brotli/util.go b/vendor/github.com/andybalholm/brotli/util.go
deleted file mode 100644
index a84553a6396..00000000000
--- a/vendor/github.com/andybalholm/brotli/util.go
+++ /dev/null
@@ -1,7 +0,0 @@
[... 7 deleted lines omitted: assert helper ...]
diff --git a/vendor/github.com/andybalholm/brotli/write_bits.go b/vendor/github.com/andybalholm/brotli/write_bits.go
deleted file mode 100644
index 8f15c20257f..00000000000
--- a/vendor/github.com/andybalholm/brotli/write_bits.go
+++ /dev/null
@@ -1,56 +0,0 @@
[... 56 deleted lines omitted: low-level LSB-first bit-writing helpers ...]
diff --git a/vendor/github.com/andybalholm/brotli/writer.go b/vendor/github.com/andybalholm/brotli/writer.go
deleted file mode 100644
index ec333f9cff6..00000000000
--- a/vendor/github.com/andybalholm/brotli/writer.go
+++ /dev/null
@@ -1,123 +0,0 @@
[... 123 deleted lines omitted: the brotli Writer API (WriterOptions, NewWriter, NewWriterLevel, Reset, Write, Flush, Close) ...]
diff --git a/vendor/github.com/armon/go-metrics/.gitignore b/vendor/github.com/armon/go-metrics/.gitignore
deleted file mode 100644
index 8c03ec112a4..00000000000
--- a/vendor/github.com/armon/go-metrics/.gitignore
+++ /dev/null
@@ -1,24 +0,0 @@
[... 24 deleted lines omitted: Go build-artifact ignore rules ...]
diff --git a/vendor/github.com/armon/go-metrics/LICENSE b/vendor/github.com/armon/go-metrics/LICENSE
deleted file mode 100644
index 106569e542b..00000000000
--- a/vendor/github.com/armon/go-metrics/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
[... 20 deleted lines omitted: MIT license text ...]
diff --git a/vendor/github.com/armon/go-metrics/README.md b/vendor/github.com/armon/go-metrics/README.md
deleted file mode 100644
index aa73348c08d..00000000000
--- a/vendor/github.com/armon/go-metrics/README.md
+++ /dev/null
@@ -1,91 +0,0 @@
[... 91 deleted lines omitted: go-metrics README covering sinks, label filtering, and usage examples ...]
diff --git a/vendor/github.com/armon/go-metrics/const_unix.go b/vendor/github.com/armon/go-metrics/const_unix.go
deleted file mode 100644
index 31098dd57e5..00000000000
--- a/vendor/github.com/armon/go-metrics/const_unix.go
+++ /dev/null
@@ -1,12 +0,0 @@
[... 12 deleted lines omitted: DefaultSignal (SIGUSR1) for non-Windows builds ...]
diff --git a/vendor/github.com/armon/go-metrics/const_windows.go b/vendor/github.com/armon/go-metrics/const_windows.go
deleted file mode 100644
index 38136af3e42..00000000000
--- a/vendor/github.com/armon/go-metrics/const_windows.go
+++ /dev/null
@@ -1,13 +0,0 @@
[... 13 deleted lines omitted: DefaultSignal (SIGBREAK) for Windows builds ...]
diff --git a/vendor/github.com/armon/go-metrics/inmem.go b/vendor/github.com/armon/go-metrics/inmem.go
deleted file mode 100644
index 4e2d6a709e2..00000000000
--- a/vendor/github.com/armon/go-metrics/inmem.go
+++ /dev/null
@@ -1,348 +0,0 @@
[... 348 deleted lines omitted: InmemSink in-memory metric aggregation over retained intervals ...]
diff --git a/vendor/github.com/armon/go-metrics/inmem_endpoint.go b/vendor/github.com/armon/go-metrics/inmem_endpoint.go
deleted file mode 100644
index 504f1b37485..00000000000
--- a/vendor/github.com/armon/go-metrics/inmem_endpoint.go
+++ /dev/null
@@ -1,118 +0,0 @@
[... deleted lines omitted: MetricsSummary, GaugeValue, PointValue, and SampledValue types for the in-memory metrics summary endpoint ...]
-func (i *InmemSink) DisplayMetrics(resp http.ResponseWriter, req *http.Request) (interface{}, error) { - data := i.Data() - - var interval *IntervalMetrics - n := len(data) - switch { - case n == 0: - return nil, fmt.Errorf("no metric intervals have been initialized yet") - case n == 1: - // Show the current interval if it's all we have - interval = i.intervals[0] - default: - // Show the most recent finished interval if we have one - interval = i.intervals[n-2] - } - - summary := MetricsSummary{ - Timestamp: interval.Interval.Round(time.Second).UTC().String(), - Gauges: make([]GaugeValue, 0, len(interval.Gauges)), - Points: make([]PointValue, 0, len(interval.Points)), - } - - // Format and sort the output of each metric type, so it gets displayed in a - // deterministic order. - for name, points := range interval.Points { - summary.Points = append(summary.Points, PointValue{name, points}) - } - sort.Slice(summary.Points, func(i, j int) bool { - return summary.Points[i].Name < summary.Points[j].Name - }) - - for hash, value := range interval.Gauges { - value.Hash = hash - value.DisplayLabels = make(map[string]string) - for _, label := range value.Labels { - value.DisplayLabels[label.Name] = label.Value - } - value.Labels = nil - - summary.Gauges = append(summary.Gauges, value) - } - sort.Slice(summary.Gauges, func(i, j int) bool { - return summary.Gauges[i].Hash < summary.Gauges[j].Hash - }) - - summary.Counters = formatSamples(interval.Counters) - summary.Samples = formatSamples(interval.Samples) - - return summary, nil -} - -func formatSamples(source map[string]SampledValue) []SampledValue { - output := make([]SampledValue, 0, len(source)) - for hash, sample := range source { - displayLabels := make(map[string]string) - for _, label := range sample.Labels { - displayLabels[label.Name] = label.Value - } - - output = append(output, SampledValue{ - Name: sample.Name, - Hash: hash, - AggregateSample: sample.AggregateSample, - Mean: sample.AggregateSample.Mean(), - Stddev: sample.AggregateSample.Stddev(), - DisplayLabels: displayLabels, - }) - } - sort.Slice(output, func(i, j int) bool { - return output[i].Hash < output[j].Hash - }) - - return output -} diff --git a/vendor/github.com/armon/go-metrics/inmem_signal.go b/vendor/github.com/armon/go-metrics/inmem_signal.go deleted file mode 100644 index 0937f4aedf7..00000000000 --- a/vendor/github.com/armon/go-metrics/inmem_signal.go +++ /dev/null @@ -1,117 +0,0 @@ -package metrics - -import ( - "bytes" - "fmt" - "io" - "os" - "os/signal" - "strings" - "sync" - "syscall" -) - -// InmemSignal is used to listen for a given signal, and when received, -// to dump the current metrics from the InmemSink to an io.Writer -type InmemSignal struct { - signal syscall.Signal - inm *InmemSink - w io.Writer - sigCh chan os.Signal - - stop bool - stopCh chan struct{} - stopLock sync.Mutex -} - -// NewInmemSignal creates a new InmemSignal which listens for a given signal, -// and dumps the current metrics out to a writer -func NewInmemSignal(inmem *InmemSink, sig syscall.Signal, w io.Writer) *InmemSignal { - i := &InmemSignal{ - signal: sig, - inm: inmem, - w: w, - sigCh: make(chan os.Signal, 1), - stopCh: make(chan struct{}), - } - signal.Notify(i.sigCh, sig) - go i.run() - return i -} - -// DefaultInmemSignal returns a new InmemSignal that responds to SIGUSR1 -// and writes output to stderr. 
Windows uses SIGBREAK -func DefaultInmemSignal(inmem *InmemSink) *InmemSignal { - return NewInmemSignal(inmem, DefaultSignal, os.Stderr) -} - -// Stop is used to stop the InmemSignal from listening -func (i *InmemSignal) Stop() { - i.stopLock.Lock() - defer i.stopLock.Unlock() - - if i.stop { - return - } - i.stop = true - close(i.stopCh) - signal.Stop(i.sigCh) -} - -// run is a long running routine that handles signals -func (i *InmemSignal) run() { - for { - select { - case <-i.sigCh: - i.dumpStats() - case <-i.stopCh: - return - } - } -} - -// dumpStats is used to dump the data to output writer -func (i *InmemSignal) dumpStats() { - buf := bytes.NewBuffer(nil) - - data := i.inm.Data() - // Skip the last period which is still being aggregated - for j := 0; j < len(data)-1; j++ { - intv := data[j] - intv.RLock() - for _, val := range intv.Gauges { - name := i.flattenLabels(val.Name, val.Labels) - fmt.Fprintf(buf, "[%v][G] '%s': %0.3f\n", intv.Interval, name, val.Value) - } - for name, vals := range intv.Points { - for _, val := range vals { - fmt.Fprintf(buf, "[%v][P] '%s': %0.3f\n", intv.Interval, name, val) - } - } - for _, agg := range intv.Counters { - name := i.flattenLabels(agg.Name, agg.Labels) - fmt.Fprintf(buf, "[%v][C] '%s': %s\n", intv.Interval, name, agg.AggregateSample) - } - for _, agg := range intv.Samples { - name := i.flattenLabels(agg.Name, agg.Labels) - fmt.Fprintf(buf, "[%v][S] '%s': %s\n", intv.Interval, name, agg.AggregateSample) - } - intv.RUnlock() - } - - // Write out the bytes - i.w.Write(buf.Bytes()) -} - -// Flattens the key for formatting along with its labels, removes spaces -func (i *InmemSignal) flattenLabels(name string, labels []Label) string { - buf := bytes.NewBufferString(name) - replacer := strings.NewReplacer(" ", "_", ":", "_") - - for _, label := range labels { - replacer.WriteString(buf, ".") - replacer.WriteString(buf, label.Value) - } - - return buf.String() -} diff --git a/vendor/github.com/armon/go-metrics/metrics.go b/vendor/github.com/armon/go-metrics/metrics.go deleted file mode 100644 index cf9def748e2..00000000000 --- a/vendor/github.com/armon/go-metrics/metrics.go +++ /dev/null @@ -1,278 +0,0 @@ -package metrics - -import ( - "runtime" - "strings" - "time" - - "github.com/hashicorp/go-immutable-radix" -) - -type Label struct { - Name string - Value string -} - -func (m *Metrics) SetGauge(key []string, val float32) { - m.SetGaugeWithLabels(key, val, nil) -} - -func (m *Metrics) SetGaugeWithLabels(key []string, val float32, labels []Label) { - if m.HostName != "" { - if m.EnableHostnameLabel { - labels = append(labels, Label{"host", m.HostName}) - } else if m.EnableHostname { - key = insert(0, m.HostName, key) - } - } - if m.EnableTypePrefix { - key = insert(0, "gauge", key) - } - if m.ServiceName != "" { - if m.EnableServiceLabel { - labels = append(labels, Label{"service", m.ServiceName}) - } else { - key = insert(0, m.ServiceName, key) - } - } - allowed, labelsFiltered := m.allowMetric(key, labels) - if !allowed { - return - } - m.sink.SetGaugeWithLabels(key, val, labelsFiltered) -} - -func (m *Metrics) EmitKey(key []string, val float32) { - if m.EnableTypePrefix { - key = insert(0, "kv", key) - } - if m.ServiceName != "" { - key = insert(0, m.ServiceName, key) - } - allowed, _ := m.allowMetric(key, nil) - if !allowed { - return - } - m.sink.EmitKey(key, val) -} - -func (m *Metrics) IncrCounter(key []string, val float32) { - m.IncrCounterWithLabels(key, val, nil) -} - -func (m *Metrics) IncrCounterWithLabels(key []string, val float32, 
labels []Label) { - if m.HostName != "" && m.EnableHostnameLabel { - labels = append(labels, Label{"host", m.HostName}) - } - if m.EnableTypePrefix { - key = insert(0, "counter", key) - } - if m.ServiceName != "" { - if m.EnableServiceLabel { - labels = append(labels, Label{"service", m.ServiceName}) - } else { - key = insert(0, m.ServiceName, key) - } - } - allowed, labelsFiltered := m.allowMetric(key, labels) - if !allowed { - return - } - m.sink.IncrCounterWithLabels(key, val, labelsFiltered) -} - -func (m *Metrics) AddSample(key []string, val float32) { - m.AddSampleWithLabels(key, val, nil) -} - -func (m *Metrics) AddSampleWithLabels(key []string, val float32, labels []Label) { - if m.HostName != "" && m.EnableHostnameLabel { - labels = append(labels, Label{"host", m.HostName}) - } - if m.EnableTypePrefix { - key = insert(0, "sample", key) - } - if m.ServiceName != "" { - if m.EnableServiceLabel { - labels = append(labels, Label{"service", m.ServiceName}) - } else { - key = insert(0, m.ServiceName, key) - } - } - allowed, labelsFiltered := m.allowMetric(key, labels) - if !allowed { - return - } - m.sink.AddSampleWithLabels(key, val, labelsFiltered) -} - -func (m *Metrics) MeasureSince(key []string, start time.Time) { - m.MeasureSinceWithLabels(key, start, nil) -} - -func (m *Metrics) MeasureSinceWithLabels(key []string, start time.Time, labels []Label) { - if m.HostName != "" && m.EnableHostnameLabel { - labels = append(labels, Label{"host", m.HostName}) - } - if m.EnableTypePrefix { - key = insert(0, "timer", key) - } - if m.ServiceName != "" { - if m.EnableServiceLabel { - labels = append(labels, Label{"service", m.ServiceName}) - } else { - key = insert(0, m.ServiceName, key) - } - } - allowed, labelsFiltered := m.allowMetric(key, labels) - if !allowed { - return - } - now := time.Now() - elapsed := now.Sub(start) - msec := float32(elapsed.Nanoseconds()) / float32(m.TimerGranularity) - m.sink.AddSampleWithLabels(key, msec, labelsFiltered) -} - -// UpdateFilter overwrites the existing filter with the given rules. -func (m *Metrics) UpdateFilter(allow, block []string) { - m.UpdateFilterAndLabels(allow, block, m.AllowedLabels, m.BlockedLabels) -} - -// UpdateFilterAndLabels overwrites the existing filter with the given rules. 
-func (m *Metrics) UpdateFilterAndLabels(allow, block, allowedLabels, blockedLabels []string) { - m.filterLock.Lock() - defer m.filterLock.Unlock() - - m.AllowedPrefixes = allow - m.BlockedPrefixes = block - - if allowedLabels == nil { - // Having a white list means we take only elements from it - m.allowedLabels = nil - } else { - m.allowedLabels = make(map[string]bool) - for _, v := range allowedLabels { - m.allowedLabels[v] = true - } - } - m.blockedLabels = make(map[string]bool) - for _, v := range blockedLabels { - m.blockedLabels[v] = true - } - m.AllowedLabels = allowedLabels - m.BlockedLabels = blockedLabels - - m.filter = iradix.New() - for _, prefix := range m.AllowedPrefixes { - m.filter, _, _ = m.filter.Insert([]byte(prefix), true) - } - for _, prefix := range m.BlockedPrefixes { - m.filter, _, _ = m.filter.Insert([]byte(prefix), false) - } -} - -// labelIsAllowed return true if a should be included in metric -// the caller should lock m.filterLock while calling this method -func (m *Metrics) labelIsAllowed(label *Label) bool { - labelName := (*label).Name - if m.blockedLabels != nil { - _, ok := m.blockedLabels[labelName] - if ok { - // If present, let's remove this label - return false - } - } - if m.allowedLabels != nil { - _, ok := m.allowedLabels[labelName] - return ok - } - // Allow by default - return true -} - -// filterLabels return only allowed labels -// the caller should lock m.filterLock while calling this method -func (m *Metrics) filterLabels(labels []Label) []Label { - if labels == nil { - return nil - } - toReturn := labels[:0] - for _, label := range labels { - if m.labelIsAllowed(&label) { - toReturn = append(toReturn, label) - } - } - return toReturn -} - -// Returns whether the metric should be allowed based on configured prefix filters -// Also return the applicable labels -func (m *Metrics) allowMetric(key []string, labels []Label) (bool, []Label) { - m.filterLock.RLock() - defer m.filterLock.RUnlock() - - if m.filter == nil || m.filter.Len() == 0 { - return m.Config.FilterDefault, m.filterLabels(labels) - } - - _, allowed, ok := m.filter.Root().LongestPrefix([]byte(strings.Join(key, "."))) - if !ok { - return m.Config.FilterDefault, m.filterLabels(labels) - } - - return allowed.(bool), m.filterLabels(labels) -} - -// Periodically collects runtime stats to publish -func (m *Metrics) collectStats() { - for { - time.Sleep(m.ProfileInterval) - m.emitRuntimeStats() - } -} - -// Emits various runtime statsitics -func (m *Metrics) emitRuntimeStats() { - // Export number of Goroutines - numRoutines := runtime.NumGoroutine() - m.SetGauge([]string{"runtime", "num_goroutines"}, float32(numRoutines)) - - // Export memory stats - var stats runtime.MemStats - runtime.ReadMemStats(&stats) - m.SetGauge([]string{"runtime", "alloc_bytes"}, float32(stats.Alloc)) - m.SetGauge([]string{"runtime", "sys_bytes"}, float32(stats.Sys)) - m.SetGauge([]string{"runtime", "malloc_count"}, float32(stats.Mallocs)) - m.SetGauge([]string{"runtime", "free_count"}, float32(stats.Frees)) - m.SetGauge([]string{"runtime", "heap_objects"}, float32(stats.HeapObjects)) - m.SetGauge([]string{"runtime", "total_gc_pause_ns"}, float32(stats.PauseTotalNs)) - m.SetGauge([]string{"runtime", "total_gc_runs"}, float32(stats.NumGC)) - - // Export info about the last few GC runs - num := stats.NumGC - - // Handle wrap around - if num < m.lastNumGC { - m.lastNumGC = 0 - } - - // Ensure we don't scan more than 256 - if num-m.lastNumGC >= 256 { - m.lastNumGC = num - 255 - } - - for i := m.lastNumGC; i < num; 
i++ { - pause := stats.PauseNs[i%256] - m.AddSample([]string{"runtime", "gc_pause_ns"}, float32(pause)) - } - m.lastNumGC = num -} - -// Inserts a string value at an index into the slice -func insert(i int, v string, s []string) []string { - s = append(s, "") - copy(s[i+1:], s[i:]) - s[i] = v - return s -} diff --git a/vendor/github.com/armon/go-metrics/sink.go b/vendor/github.com/armon/go-metrics/sink.go deleted file mode 100644 index 0b7d6e4be43..00000000000 --- a/vendor/github.com/armon/go-metrics/sink.go +++ /dev/null @@ -1,115 +0,0 @@ -package metrics - -import ( - "fmt" - "net/url" -) - -// The MetricSink interface is used to transmit metrics information -// to an external system -type MetricSink interface { - // A Gauge should retain the last value it is set to - SetGauge(key []string, val float32) - SetGaugeWithLabels(key []string, val float32, labels []Label) - - // Should emit a Key/Value pair for each call - EmitKey(key []string, val float32) - - // Counters should accumulate values - IncrCounter(key []string, val float32) - IncrCounterWithLabels(key []string, val float32, labels []Label) - - // Samples are for timing information, where quantiles are used - AddSample(key []string, val float32) - AddSampleWithLabels(key []string, val float32, labels []Label) -} - -// BlackholeSink is used to just blackhole messages -type BlackholeSink struct{} - -func (*BlackholeSink) SetGauge(key []string, val float32) {} -func (*BlackholeSink) SetGaugeWithLabels(key []string, val float32, labels []Label) {} -func (*BlackholeSink) EmitKey(key []string, val float32) {} -func (*BlackholeSink) IncrCounter(key []string, val float32) {} -func (*BlackholeSink) IncrCounterWithLabels(key []string, val float32, labels []Label) {} -func (*BlackholeSink) AddSample(key []string, val float32) {} -func (*BlackholeSink) AddSampleWithLabels(key []string, val float32, labels []Label) {} - -// FanoutSink is used to sink to fanout values to multiple sinks -type FanoutSink []MetricSink - -func (fh FanoutSink) SetGauge(key []string, val float32) { - fh.SetGaugeWithLabels(key, val, nil) -} - -func (fh FanoutSink) SetGaugeWithLabels(key []string, val float32, labels []Label) { - for _, s := range fh { - s.SetGaugeWithLabels(key, val, labels) - } -} - -func (fh FanoutSink) EmitKey(key []string, val float32) { - for _, s := range fh { - s.EmitKey(key, val) - } -} - -func (fh FanoutSink) IncrCounter(key []string, val float32) { - fh.IncrCounterWithLabels(key, val, nil) -} - -func (fh FanoutSink) IncrCounterWithLabels(key []string, val float32, labels []Label) { - for _, s := range fh { - s.IncrCounterWithLabels(key, val, labels) - } -} - -func (fh FanoutSink) AddSample(key []string, val float32) { - fh.AddSampleWithLabels(key, val, nil) -} - -func (fh FanoutSink) AddSampleWithLabels(key []string, val float32, labels []Label) { - for _, s := range fh { - s.AddSampleWithLabels(key, val, labels) - } -} - -// sinkURLFactoryFunc is an generic interface around the *SinkFromURL() function provided -// by each sink type -type sinkURLFactoryFunc func(*url.URL) (MetricSink, error) - -// sinkRegistry supports the generic NewMetricSink function by mapping URL -// schemes to metric sink factory functions -var sinkRegistry = map[string]sinkURLFactoryFunc{ - "statsd": NewStatsdSinkFromURL, - "statsite": NewStatsiteSinkFromURL, - "inmem": NewInmemSinkFromURL, -} - -// NewMetricSinkFromURL allows a generic URL input to configure any of the -// supported sinks. 
The scheme of the URL identifies the type of the sink, the -// and query parameters are used to set options. -// -// "statsd://" - Initializes a StatsdSink. The host and port are passed through -// as the "addr" of the sink -// -// "statsite://" - Initializes a StatsiteSink. The host and port become the -// "addr" of the sink -// -// "inmem://" - Initializes an InmemSink. The host and port are ignored. The -// "interval" and "duration" query parameters must be specified with valid -// durations, see NewInmemSink for details. -func NewMetricSinkFromURL(urlStr string) (MetricSink, error) { - u, err := url.Parse(urlStr) - if err != nil { - return nil, err - } - - sinkURLFactoryFunc := sinkRegistry[u.Scheme] - if sinkURLFactoryFunc == nil { - return nil, fmt.Errorf( - "cannot create metric sink, unrecognized sink name: %q", u.Scheme) - } - - return sinkURLFactoryFunc(u) -} diff --git a/vendor/github.com/armon/go-metrics/start.go b/vendor/github.com/armon/go-metrics/start.go deleted file mode 100644 index 32a28c48378..00000000000 --- a/vendor/github.com/armon/go-metrics/start.go +++ /dev/null @@ -1,141 +0,0 @@ -package metrics - -import ( - "os" - "sync" - "sync/atomic" - "time" - - "github.com/hashicorp/go-immutable-radix" -) - -// Config is used to configure metrics settings -type Config struct { - ServiceName string // Prefixed with keys to separate services - HostName string // Hostname to use. If not provided and EnableHostname, it will be os.Hostname - EnableHostname bool // Enable prefixing gauge values with hostname - EnableHostnameLabel bool // Enable adding hostname to labels - EnableServiceLabel bool // Enable adding service to labels - EnableRuntimeMetrics bool // Enables profiling of runtime metrics (GC, Goroutines, Memory) - EnableTypePrefix bool // Prefixes key with a type ("counter", "gauge", "timer") - TimerGranularity time.Duration // Granularity of timers. - ProfileInterval time.Duration // Interval to profile runtime metrics - - AllowedPrefixes []string // A list of metric prefixes to allow, with '.' as the separator - BlockedPrefixes []string // A list of metric prefixes to block, with '.' as the separator - AllowedLabels []string // A list of metric labels to allow, with '.' as the separator - BlockedLabels []string // A list of metric labels to block, with '.' 
as the separator - FilterDefault bool // Whether to allow metrics by default -} - -// Metrics represents an instance of a metrics sink that can -// be used to emit -type Metrics struct { - Config - lastNumGC uint32 - sink MetricSink - filter *iradix.Tree - allowedLabels map[string]bool - blockedLabels map[string]bool - filterLock sync.RWMutex // Lock filters and allowedLabels/blockedLabels access -} - -// Shared global metrics instance -var globalMetrics atomic.Value // *Metrics - -func init() { - // Initialize to a blackhole sink to avoid errors - globalMetrics.Store(&Metrics{sink: &BlackholeSink{}}) -} - -// DefaultConfig provides a sane default configuration -func DefaultConfig(serviceName string) *Config { - c := &Config{ - ServiceName: serviceName, // Use client provided service - HostName: "", - EnableHostname: true, // Enable hostname prefix - EnableRuntimeMetrics: true, // Enable runtime profiling - EnableTypePrefix: false, // Disable type prefix - TimerGranularity: time.Millisecond, // Timers are in milliseconds - ProfileInterval: time.Second, // Poll runtime every second - FilterDefault: true, // Don't filter metrics by default - } - - // Try to get the hostname - name, _ := os.Hostname() - c.HostName = name - return c -} - -// New is used to create a new instance of Metrics -func New(conf *Config, sink MetricSink) (*Metrics, error) { - met := &Metrics{} - met.Config = *conf - met.sink = sink - met.UpdateFilterAndLabels(conf.AllowedPrefixes, conf.BlockedPrefixes, conf.AllowedLabels, conf.BlockedLabels) - - // Start the runtime collector - if conf.EnableRuntimeMetrics { - go met.collectStats() - } - return met, nil -} - -// NewGlobal is the same as New, but it assigns the metrics object to be -// used globally as well as returning it. -func NewGlobal(conf *Config, sink MetricSink) (*Metrics, error) { - metrics, err := New(conf, sink) - if err == nil { - globalMetrics.Store(metrics) - } - return metrics, err -} - -// Proxy all the methods to the globalMetrics instance -func SetGauge(key []string, val float32) { - globalMetrics.Load().(*Metrics).SetGauge(key, val) -} - -func SetGaugeWithLabels(key []string, val float32, labels []Label) { - globalMetrics.Load().(*Metrics).SetGaugeWithLabels(key, val, labels) -} - -func EmitKey(key []string, val float32) { - globalMetrics.Load().(*Metrics).EmitKey(key, val) -} - -func IncrCounter(key []string, val float32) { - globalMetrics.Load().(*Metrics).IncrCounter(key, val) -} - -func IncrCounterWithLabels(key []string, val float32, labels []Label) { - globalMetrics.Load().(*Metrics).IncrCounterWithLabels(key, val, labels) -} - -func AddSample(key []string, val float32) { - globalMetrics.Load().(*Metrics).AddSample(key, val) -} - -func AddSampleWithLabels(key []string, val float32, labels []Label) { - globalMetrics.Load().(*Metrics).AddSampleWithLabels(key, val, labels) -} - -func MeasureSince(key []string, start time.Time) { - globalMetrics.Load().(*Metrics).MeasureSince(key, start) -} - -func MeasureSinceWithLabels(key []string, start time.Time, labels []Label) { - globalMetrics.Load().(*Metrics).MeasureSinceWithLabels(key, start, labels) -} - -func UpdateFilter(allow, block []string) { - globalMetrics.Load().(*Metrics).UpdateFilter(allow, block) -} - -// UpdateFilterAndLabels set allow/block prefixes of metrics while allowedLabels -// and blockedLabels - when not nil - allow filtering of labels in order to -// block/allow globally labels (especially useful when having large number of -// values for a given label). 
See README.md for more information about usage. -func UpdateFilterAndLabels(allow, block, allowedLabels, blockedLabels []string) { - globalMetrics.Load().(*Metrics).UpdateFilterAndLabels(allow, block, allowedLabels, blockedLabels) -} diff --git a/vendor/github.com/armon/go-metrics/statsd.go b/vendor/github.com/armon/go-metrics/statsd.go deleted file mode 100644 index 1bfffce46e2..00000000000 --- a/vendor/github.com/armon/go-metrics/statsd.go +++ /dev/null @@ -1,184 +0,0 @@ -package metrics - -import ( - "bytes" - "fmt" - "log" - "net" - "net/url" - "strings" - "time" -) - -const ( - // statsdMaxLen is the maximum size of a packet - // to send to statsd - statsdMaxLen = 1400 -) - -// StatsdSink provides a MetricSink that can be used -// with a statsite or statsd metrics server. It uses -// only UDP packets, while StatsiteSink uses TCP. -type StatsdSink struct { - addr string - metricQueue chan string -} - -// NewStatsdSinkFromURL creates an StatsdSink from a URL. It is used -// (and tested) from NewMetricSinkFromURL. -func NewStatsdSinkFromURL(u *url.URL) (MetricSink, error) { - return NewStatsdSink(u.Host) -} - -// NewStatsdSink is used to create a new StatsdSink -func NewStatsdSink(addr string) (*StatsdSink, error) { - s := &StatsdSink{ - addr: addr, - metricQueue: make(chan string, 4096), - } - go s.flushMetrics() - return s, nil -} - -// Close is used to stop flushing to statsd -func (s *StatsdSink) Shutdown() { - close(s.metricQueue) -} - -func (s *StatsdSink) SetGauge(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|g\n", flatKey, val)) -} - -func (s *StatsdSink) SetGaugeWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|g\n", flatKey, val)) -} - -func (s *StatsdSink) EmitKey(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|kv\n", flatKey, val)) -} - -func (s *StatsdSink) IncrCounter(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|c\n", flatKey, val)) -} - -func (s *StatsdSink) IncrCounterWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|c\n", flatKey, val)) -} - -func (s *StatsdSink) AddSample(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|ms\n", flatKey, val)) -} - -func (s *StatsdSink) AddSampleWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|ms\n", flatKey, val)) -} - -// Flattens the key for formatting, removes spaces -func (s *StatsdSink) flattenKey(parts []string) string { - joined := strings.Join(parts, ".") - return strings.Map(func(r rune) rune { - switch r { - case ':': - fallthrough - case ' ': - return '_' - default: - return r - } - }, joined) -} - -// Flattens the key along with labels for formatting, removes spaces -func (s *StatsdSink) flattenKeyLabels(parts []string, labels []Label) string { - for _, label := range labels { - parts = append(parts, label.Value) - } - return s.flattenKey(parts) -} - -// Does a non-blocking push to the metrics queue -func (s *StatsdSink) pushMetric(m string) { - select { - case s.metricQueue <- m: - default: - } -} - -// Flushes metrics -func (s *StatsdSink) flushMetrics() { - var sock net.Conn - var err error - var wait <-chan time.Time - ticker := time.NewTicker(flushInterval) - defer ticker.Stop() - 
-CONNECT: - // Create a buffer - buf := bytes.NewBuffer(nil) - - // Attempt to connect - sock, err = net.Dial("udp", s.addr) - if err != nil { - log.Printf("[ERR] Error connecting to statsd! Err: %s", err) - goto WAIT - } - - for { - select { - case metric, ok := <-s.metricQueue: - // Get a metric from the queue - if !ok { - goto QUIT - } - - // Check if this would overflow the packet size - if len(metric)+buf.Len() > statsdMaxLen { - _, err := sock.Write(buf.Bytes()) - buf.Reset() - if err != nil { - log.Printf("[ERR] Error writing to statsd! Err: %s", err) - goto WAIT - } - } - - // Append to the buffer - buf.WriteString(metric) - - case <-ticker.C: - if buf.Len() == 0 { - continue - } - - _, err := sock.Write(buf.Bytes()) - buf.Reset() - if err != nil { - log.Printf("[ERR] Error flushing to statsd! Err: %s", err) - goto WAIT - } - } - } - -WAIT: - // Wait for a while - wait = time.After(time.Duration(5) * time.Second) - for { - select { - // Dequeue the messages to avoid backlog - case _, ok := <-s.metricQueue: - if !ok { - goto QUIT - } - case <-wait: - goto CONNECT - } - } -QUIT: - s.metricQueue = nil -} diff --git a/vendor/github.com/armon/go-metrics/statsite.go b/vendor/github.com/armon/go-metrics/statsite.go deleted file mode 100644 index 6c0d284d2dd..00000000000 --- a/vendor/github.com/armon/go-metrics/statsite.go +++ /dev/null @@ -1,172 +0,0 @@ -package metrics - -import ( - "bufio" - "fmt" - "log" - "net" - "net/url" - "strings" - "time" -) - -const ( - // We force flush the statsite metrics after this period of - // inactivity. Prevents stats from getting stuck in a buffer - // forever. - flushInterval = 100 * time.Millisecond -) - -// NewStatsiteSinkFromURL creates an StatsiteSink from a URL. It is used -// (and tested) from NewMetricSinkFromURL. 
-func NewStatsiteSinkFromURL(u *url.URL) (MetricSink, error) { - return NewStatsiteSink(u.Host) -} - -// StatsiteSink provides a MetricSink that can be used with a -// statsite metrics server -type StatsiteSink struct { - addr string - metricQueue chan string -} - -// NewStatsiteSink is used to create a new StatsiteSink -func NewStatsiteSink(addr string) (*StatsiteSink, error) { - s := &StatsiteSink{ - addr: addr, - metricQueue: make(chan string, 4096), - } - go s.flushMetrics() - return s, nil -} - -// Close is used to stop flushing to statsite -func (s *StatsiteSink) Shutdown() { - close(s.metricQueue) -} - -func (s *StatsiteSink) SetGauge(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|g\n", flatKey, val)) -} - -func (s *StatsiteSink) SetGaugeWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|g\n", flatKey, val)) -} - -func (s *StatsiteSink) EmitKey(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|kv\n", flatKey, val)) -} - -func (s *StatsiteSink) IncrCounter(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|c\n", flatKey, val)) -} - -func (s *StatsiteSink) IncrCounterWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|c\n", flatKey, val)) -} - -func (s *StatsiteSink) AddSample(key []string, val float32) { - flatKey := s.flattenKey(key) - s.pushMetric(fmt.Sprintf("%s:%f|ms\n", flatKey, val)) -} - -func (s *StatsiteSink) AddSampleWithLabels(key []string, val float32, labels []Label) { - flatKey := s.flattenKeyLabels(key, labels) - s.pushMetric(fmt.Sprintf("%s:%f|ms\n", flatKey, val)) -} - -// Flattens the key for formatting, removes spaces -func (s *StatsiteSink) flattenKey(parts []string) string { - joined := strings.Join(parts, ".") - return strings.Map(func(r rune) rune { - switch r { - case ':': - fallthrough - case ' ': - return '_' - default: - return r - } - }, joined) -} - -// Flattens the key along with labels for formatting, removes spaces -func (s *StatsiteSink) flattenKeyLabels(parts []string, labels []Label) string { - for _, label := range labels { - parts = append(parts, label.Value) - } - return s.flattenKey(parts) -} - -// Does a non-blocking push to the metrics queue -func (s *StatsiteSink) pushMetric(m string) { - select { - case s.metricQueue <- m: - default: - } -} - -// Flushes metrics -func (s *StatsiteSink) flushMetrics() { - var sock net.Conn - var err error - var wait <-chan time.Time - var buffered *bufio.Writer - ticker := time.NewTicker(flushInterval) - defer ticker.Stop() - -CONNECT: - // Attempt to connect - sock, err = net.Dial("tcp", s.addr) - if err != nil { - log.Printf("[ERR] Error connecting to statsite! Err: %s", err) - goto WAIT - } - - // Create a buffered writer - buffered = bufio.NewWriter(sock) - - for { - select { - case metric, ok := <-s.metricQueue: - // Get a metric from the queue - if !ok { - goto QUIT - } - - // Try to send to statsite - _, err := buffered.Write([]byte(metric)) - if err != nil { - log.Printf("[ERR] Error writing to statsite! Err: %s", err) - goto WAIT - } - case <-ticker.C: - if err := buffered.Flush(); err != nil { - log.Printf("[ERR] Error flushing to statsite! 
Err: %s", err) - goto WAIT - } - } - } - -WAIT: - // Wait for a while - wait = time.After(time.Duration(5) * time.Second) - for { - select { - // Dequeue the messages to avoid backlog - case _, ok := <-s.metricQueue: - if !ok { - goto QUIT - } - case <-wait: - goto CONNECT - } - } -QUIT: - s.metricQueue = nil -} diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/.gitignore b/vendor/github.com/bshuster-repo/logrus-logstash-hook/.gitignore deleted file mode 100644 index 42067232927..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/.gitignore +++ /dev/null @@ -1,26 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test -.idea - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof -*.iml diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/.travis.yml b/vendor/github.com/bshuster-repo/logrus-logstash-hook/.travis.yml deleted file mode 100644 index 60c00ef6629..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/.travis.yml +++ /dev/null @@ -1,19 +0,0 @@ -language: go -sudo: false - -matrix: - include: - - go: 1.3 - - go: 1.4 - - go: 1.5 - - go: 1.6 - - go: tip - -install: - - # Skip - -script: - - go get -t -v ./... - - diff -u <(echo -n) <(gofmt -d .) - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/CHANGELOG.md b/vendor/github.com/bshuster-repo/logrus-logstash-hook/CHANGELOG.md deleted file mode 100644 index 31c8b5f3472..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/CHANGELOG.md +++ /dev/null @@ -1,18 +0,0 @@ -# Changelog - -## 0.4 - - * Update the name of the package from `logrus_logstash` to `logrustash` - * Add TimeFormat to Hook - * Replace the old logrus package path: `github.com/Sirupsen/logrus` with `github.com/sirupsen/logrus` - -## 0.3 - - * Fix the Logstash format to set `@version` to `"1"` - * Add unit-tests to logstash.go - * Remove the assert package - * Add prefix filtering - -## Before that (major changes) - - * Update LICENSE to MIT from GPL diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/LICENSE b/vendor/github.com/bshuster-repo/logrus-logstash-hook/LICENSE deleted file mode 100644 index 3fb4442f849..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Boaz Shuster - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/README.md b/vendor/github.com/bshuster-repo/logrus-logstash-hook/README.md deleted file mode 100644 index 9cc4378c504..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/README.md +++ /dev/null @@ -1,106 +0,0 @@ -# Logstash hook for logrus :walrus: [![Build Status](https://travis-ci.org/bshuster-repo/logrus-logstash-hook.svg?branch=master)](https://travis-ci.org/bshuster-repo/logrus-logstash-hook) -Use this hook to send the logs to [Logstash](https://www.elastic.co/products/logstash) over both UDP and TCP. - -## Usage - -```go -package main - -import ( - "github.com/sirupsen/logrus" - "github.com/bshuster-repo/logrus-logstash-hook" -) - -func main() { - log := logrus.New() - hook, err := logrustash.NewHook("tcp", "172.17.0.2:9999", "myappName") - - if err != nil { - log.Fatal(err) - } - log.Hooks.Add(hook) - ctx := log.WithFields(logrus.Fields{ - "method": "main", - }) - ... - ctx.Info("Hello World!") -} -``` - -This is how it will look like: - -```ruby -{ - "@timestamp" => "2016-02-29T16:57:23.000Z", - "@version" => "1", - "level" => "info", - "message" => "Hello World!", - "method" => "main", - "host" => "172.17.0.1", - "port" => 45199, - "type" => "myappName" -} -``` -## Hook Fields -Fields can be added to the hook, which will always be in the log context. -This can be done when creating the hook: - -```go - -hook, err := logrustash.NewHookWithFields("tcp", "172.17.0.2:9999", "myappName", logrus.Fields{ - "hostname": os.Hostname(), - "serviceName": "myServiceName", -}) -``` - -Or afterwards: - -```go - -hook.WithFields(logrus.Fields{ - "hostname": os.Hostname(), - "serviceName": "myServiceName", -}) -``` -This allows you to set up the hook so logging is available immediately, and add important fields as they become available. - -Single fields can be added/updated using 'WithField': - -```go - -hook.WithField("status", "running") -``` - - - -## Field prefix - -The hook allows you to send logging to logstash and also retain the default std output in text format. -However to keep this console output readable some fields might need to be omitted from the default non-hooked log output. -Each hook can be configured with a prefix used to identify fields which are only to be logged to the logstash connection. -For example if you don't want to see the hostname and serviceName on each log line in the console output you can add a prefix: - -```go - - -hook, err := logrustash.NewHookWithFields("tcp", "172.17.0.2:9999", "myappName", logrus.Fields{ - "_hostname": os.Hostname(), - "_serviceName": "myServiceName", -}) -... -hook.WithPrefix("_") -``` - -There are also constructors available which allow you to specify the prefix from the start. -The std-out will not have the '\_hostname' and '\_servicename' fields, and the logstash output will, but the prefix will be dropped from the name. - - -# Authors - -Name | Github | Twitter | ------------- | --------- | ---------- | -Boaz Shuster | ripcurld0 | @ripcurld0 | - -# License - -MIT. 
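For reference, the "prefix from the start" constructor mentioned in the removed README above corresponds to `NewHookWithFieldsAndPrefix` in the `logstash.go` file deleted just below. A minimal sketch of how it was typically wired up — the address, app name, and field values are placeholders borrowed from the README's own examples, not anything this change set configures:

```go
package main

import (
	"os"

	logrustash "github.com/bshuster-repo/logrus-logstash-hook"
	"github.com/sirupsen/logrus"
)

func main() {
	log := logrus.New()
	host, _ := os.Hostname()

	// Passing the "_" prefix at construction time means the prefixed fields
	// are forwarded to Logstash (with the prefix stripped) and removed from
	// the plain std-out log entries, as described in the README above.
	hook, err := logrustash.NewHookWithFieldsAndPrefix(
		"tcp", "172.17.0.2:9999", "myappName",
		logrus.Fields{
			"_hostname":    host,
			"_serviceName": "myServiceName",
		},
		"_",
	)
	if err != nil {
		log.Fatal(err)
	}
	log.Hooks.Add(hook)

	log.WithField("method", "main").Info("Hello World!")
}
```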
diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash.go b/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash.go deleted file mode 100644 index 1f2e5a0cb96..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash.go +++ /dev/null @@ -1,133 +0,0 @@ -package logrustash - -import ( - "net" - "strings" - - "github.com/sirupsen/logrus" -) - -// Hook represents a connection to a Logstash instance -type Hook struct { - conn net.Conn - appName string - alwaysSentFields logrus.Fields - hookOnlyPrefix string - TimeFormat string -} - -// NewHook creates a new hook to a Logstash instance, which listens on -// `protocol`://`address`. -func NewHook(protocol, address, appName string) (*Hook, error) { - return NewHookWithFields(protocol, address, appName, make(logrus.Fields)) -} - -// NewHookWithConn creates a new hook to a Logstash instance, using the supplied connection -func NewHookWithConn(conn net.Conn, appName string) (*Hook, error) { - return NewHookWithFieldsAndConn(conn, appName, make(logrus.Fields)) -} - -// NewHookWithFields creates a new hook to a Logstash instance, which listens on -// `protocol`://`address`. alwaysSentFields will be sent with every log entry. -func NewHookWithFields(protocol, address, appName string, alwaysSentFields logrus.Fields) (*Hook, error) { - return NewHookWithFieldsAndPrefix(protocol, address, appName, alwaysSentFields, "") -} - -// NewHookWithFieldsAndPrefix creates a new hook to a Logstash instance, which listens on -// `protocol`://`address`. alwaysSentFields will be sent with every log entry. prefix is used to select fields to filter -func NewHookWithFieldsAndPrefix(protocol, address, appName string, alwaysSentFields logrus.Fields, prefix string) (*Hook, error) { - conn, err := net.Dial(protocol, address) - if err != nil { - return nil, err - } - return NewHookWithFieldsAndConnAndPrefix(conn, appName, alwaysSentFields, prefix) -} - -// NewHookWithFieldsAndConn creates a new hook to a Logstash instance using the supplied connection -func NewHookWithFieldsAndConn(conn net.Conn, appName string, alwaysSentFields logrus.Fields) (*Hook, error) { - return NewHookWithFieldsAndConnAndPrefix(conn, appName, alwaysSentFields, "") -} - -//NewHookWithFieldsAndConnAndPrefix creates a new hook to a Logstash instance using the suppolied connection and prefix -func NewHookWithFieldsAndConnAndPrefix(conn net.Conn, appName string, alwaysSentFields logrus.Fields, prefix string) (*Hook, error) { - return &Hook{conn: conn, appName: appName, alwaysSentFields: alwaysSentFields, hookOnlyPrefix: prefix}, nil -} - -//NewFilterHook makes a new hook which does not forward to logstash, but simply enforces the prefix rules -func NewFilterHook() *Hook { - return NewFilterHookWithPrefix("") -} - -//NewFilterHookWithPrefix make a new hook which does not forward to logstash, but simply enforces the specified prefix -func NewFilterHookWithPrefix(prefix string) *Hook { - return &Hook{conn: nil, appName: "", alwaysSentFields: make(logrus.Fields), hookOnlyPrefix: prefix} -} - -func (h *Hook) filterHookOnly(entry *logrus.Entry) { - if h.hookOnlyPrefix != "" { - for key := range entry.Data { - if strings.HasPrefix(key, h.hookOnlyPrefix) { - delete(entry.Data, key) - } - } - } - -} - -//WithPrefix sets a prefix filter to use in all subsequent logging -func (h *Hook) WithPrefix(prefix string) { - h.hookOnlyPrefix = prefix -} - -func (h *Hook) WithField(key string, value interface{}) { - h.alwaysSentFields[key] = value -} - -func (h *Hook) 
WithFields(fields logrus.Fields) { - //Add all the new fields to the 'alwaysSentFields', possibly overwriting exising fields - for key, value := range fields { - h.alwaysSentFields[key] = value - } -} - -func (h *Hook) Fire(entry *logrus.Entry) error { - //make sure we always clear the hookonly fields from the entry - defer h.filterHookOnly(entry) - - // Add in the alwaysSentFields. We don't override fields that are already set. - for k, v := range h.alwaysSentFields { - if _, inMap := entry.Data[k]; !inMap { - entry.Data[k] = v - } - } - - //For a filteringHook, stop here - if h.conn == nil { - return nil - } - - formatter := LogstashFormatter{Type: h.appName} - if h.TimeFormat != "" { - formatter.TimestampFormat = h.TimeFormat - } - - dataBytes, err := formatter.FormatWithPrefix(entry, h.hookOnlyPrefix) - if err != nil { - return err - } - if _, err = h.conn.Write(dataBytes); err != nil { - return err - } - return nil -} - -func (h *Hook) Levels() []logrus.Level { - return []logrus.Level{ - logrus.PanicLevel, - logrus.FatalLevel, - logrus.ErrorLevel, - logrus.WarnLevel, - logrus.InfoLevel, - logrus.DebugLevel, - } -} diff --git a/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash_formatter.go b/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash_formatter.go deleted file mode 100644 index 64bc5c3899b..00000000000 --- a/vendor/github.com/bshuster-repo/logrus-logstash-hook/logstash_formatter.go +++ /dev/null @@ -1,81 +0,0 @@ -package logrustash - -import ( - "encoding/json" - "fmt" - "strings" - "time" - - "github.com/sirupsen/logrus" -) - -// Formatter generates json in logstash format. -// Logstash site: http://logstash.net/ -type LogstashFormatter struct { - Type string // if not empty use for logstash type field. - - // TimestampFormat sets the format used for timestamps. 
- TimestampFormat string -} - -func (f *LogstashFormatter) Format(entry *logrus.Entry) ([]byte, error) { - return f.FormatWithPrefix(entry, "") -} - -func (f *LogstashFormatter) FormatWithPrefix(entry *logrus.Entry, prefix string) ([]byte, error) { - fields := make(logrus.Fields) - for k, v := range entry.Data { - //remvove the prefix when sending the fields to logstash - if prefix != "" && strings.HasPrefix(k, prefix) { - k = strings.TrimPrefix(k, prefix) - } - - switch v := v.(type) { - case error: - // Otherwise errors are ignored by `encoding/json` - // https://github.com/Sirupsen/logrus/issues/377 - fields[k] = v.Error() - default: - fields[k] = v - } - } - - fields["@version"] = "1" - - timeStampFormat := f.TimestampFormat - - if timeStampFormat == "" { - timeStampFormat = time.RFC3339 - } - - fields["@timestamp"] = entry.Time.Format(timeStampFormat) - - // set message field - v, ok := entry.Data["message"] - if ok { - fields["fields.message"] = v - } - fields["message"] = entry.Message - - // set level field - v, ok = entry.Data["level"] - if ok { - fields["fields.level"] = v - } - fields["level"] = entry.Level.String() - - // set type field - if f.Type != "" { - v, ok = entry.Data["type"] - if ok { - fields["fields.type"] = v - } - fields["type"] = f.Type - } - - serialized, err := json.Marshal(fields) - if err != nil { - return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) - } - return append(serialized, '\n'), nil -} diff --git a/vendor/github.com/buger/jsonparser/.gitignore b/vendor/github.com/buger/jsonparser/.gitignore deleted file mode 100644 index 9de1b0fa606..00000000000 --- a/vendor/github.com/buger/jsonparser/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ - -*.test - -*.out - -*.mprof - -vendor/github.com/buger/goterm/ -prof.cpu -prof.mem diff --git a/vendor/github.com/buger/jsonparser/.travis.yml b/vendor/github.com/buger/jsonparser/.travis.yml deleted file mode 100644 index 8e29f1312a0..00000000000 --- a/vendor/github.com/buger/jsonparser/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: go -go: - - 1.7.x - - 1.8.x - - 1.9.x - - 1.10.x - - 1.11.x -script: go test -v ./. diff --git a/vendor/github.com/buger/jsonparser/Dockerfile b/vendor/github.com/buger/jsonparser/Dockerfile deleted file mode 100644 index 37fc9fd0b4d..00000000000 --- a/vendor/github.com/buger/jsonparser/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM golang:1.6 - -RUN go get github.com/Jeffail/gabs -RUN go get github.com/bitly/go-simplejson -RUN go get github.com/pquerna/ffjson -RUN go get github.com/antonholmquist/jason -RUN go get github.com/mreiferson/go-ujson -RUN go get -tags=unsafe -u github.com/ugorji/go/codec -RUN go get github.com/mailru/easyjson - -WORKDIR /go/src/github.com/buger/jsonparser -ADD . 
/go/src/github.com/buger/jsonparser \ No newline at end of file diff --git a/vendor/github.com/buger/jsonparser/LICENSE b/vendor/github.com/buger/jsonparser/LICENSE deleted file mode 100644 index ac25aeb7da2..00000000000 --- a/vendor/github.com/buger/jsonparser/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2016 Leonid Bugaev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/buger/jsonparser/Makefile b/vendor/github.com/buger/jsonparser/Makefile deleted file mode 100644 index e843368cf10..00000000000 --- a/vendor/github.com/buger/jsonparser/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -SOURCE = parser.go -CONTAINER = jsonparser -SOURCE_PATH = /go/src/github.com/buger/jsonparser -BENCHMARK = JsonParser -BENCHTIME = 5s -TEST = . -DRUN = docker run -v `pwd`:$(SOURCE_PATH) -i -t $(CONTAINER) - -build: - docker build -t $(CONTAINER) . - -race: - $(DRUN) --env GORACE="halt_on_error=1" go test ./. $(ARGS) -v -race -timeout 15s - -bench: - $(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -benchtime $(BENCHTIME) -v - -bench_local: - $(DRUN) go test $(LDFLAGS) -test.benchmem -bench . $(ARGS) -benchtime $(BENCHTIME) -v - -profile: - $(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -memprofile mem.mprof -v - $(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -cpuprofile cpu.out -v - $(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -c - -test: - $(DRUN) go test $(LDFLAGS) ./ -run $(TEST) -timeout 10s $(ARGS) -v - -fmt: - $(DRUN) go fmt ./... - -vet: - $(DRUN) go vet ./. - -bash: - $(DRUN) /bin/bash \ No newline at end of file diff --git a/vendor/github.com/buger/jsonparser/README.md b/vendor/github.com/buger/jsonparser/README.md deleted file mode 100644 index a9ee6022dc1..00000000000 --- a/vendor/github.com/buger/jsonparser/README.md +++ /dev/null @@ -1,365 +0,0 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/buger/jsonparser)](https://goreportcard.com/report/github.com/buger/jsonparser) ![License](https://img.shields.io/dub/l/vibe-d.svg) -# Alternative JSON parser for Go (so far fastest) - -It does not require you to know the structure of the payload (eg. create structs), and allows accessing fields by providing the path to them. It is up to **10 times faster** than standard `encoding/json` package (depending on payload size and usage), **allocates no memory**. See benchmarks below. 
- -## Rationale -Originally I made this for a project that relies on a lot of 3rd party APIs that can be unpredictable and complex. -I love simplicity and prefer to avoid external dependecies. `encoding/json` requires you to know exactly your data structures, or if you prefer to use `map[string]interface{}` instead, it will be very slow and hard to manage. -I investigated what's on the market and found that most libraries are just wrappers around `encoding/json`, there is few options with own parsers (`ffjson`, `easyjson`), but they still requires you to create data structures. - - -Goal of this project is to push JSON parser to the performance limits and not sacrifice with compliance and developer user experience. - -## Example -For the given JSON our goal is to extract the user's full name, number of github followers and avatar. - -```go -import "github.com/buger/jsonparser" - -... - -data := []byte(`{ - "person": { - "name": { - "first": "Leonid", - "last": "Bugaev", - "fullName": "Leonid Bugaev" - }, - "github": { - "handle": "buger", - "followers": 109 - }, - "avatars": [ - { "url": "https://avatars1.githubusercontent.com/u/14009?v=3&s=460", "type": "thumbnail" } - ] - }, - "company": { - "name": "Acme" - } -}`) - -// You can specify key path by providing arguments to Get function -jsonparser.Get(data, "person", "name", "fullName") - -// There is `GetInt` and `GetBoolean` helpers if you exactly know key data type -jsonparser.GetInt(data, "person", "github", "followers") - -// When you try to get object, it will return you []byte slice pointer to data containing it -// In `company` it will be `{"name": "Acme"}` -jsonparser.Get(data, "company") - -// If the key doesn't exist it will throw an error -var size int64 -if value, err := jsonparser.GetInt(data, "company", "size"); err == nil { - size = value -} - -// You can use `ArrayEach` helper to iterate items [item1, item2 .... itemN] -jsonparser.ArrayEach(data, func(value []byte, dataType jsonparser.ValueType, offset int, err error) { - fmt.Println(jsonparser.Get(value, "url")) -}, "person", "avatars") - -// Or use can access fields by index! -jsonparser.GetInt("person", "avatars", "[0]", "url") - -// You can use `ObjectEach` helper to iterate objects { "key1":object1, "key2":object2, .... "keyN":objectN } -jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error { - fmt.Printf("Key: '%s'\n Value: '%s'\n Type: %s\n", string(key), string(value), dataType) - return nil -}, "person", "name") - -// The most efficient way to extract multiple keys is `EachKey` - -paths := [][]string{ - []string{"person", "name", "fullName"}, - []string{"person", "avatars", "[0]", "url"}, - []string{"company", "url"}, -} -jsonparser.EachKey(data, func(idx int, value []byte, vt jsonparser.ValueType, err error){ - switch idx { - case 0: // []string{"person", "name", "fullName"} - ... - case 1: // []string{"person", "avatars", "[0]", "url"} - ... - case 2: // []string{"company", "url"}, - ... - } -}, paths...) - -// For more information see docs below -``` - -## Need to speedup your app? - -I'm available for consulting and can help you push your app performance to the limits. Ping me at: leonsbox@gmail.com. - -## Reference - -Library API is really simple. You just need the `Get` method to perform any operation. The rest is just helpers around it. 
- -You also can view API at [godoc.org](https://godoc.org/github.com/buger/jsonparser) - - -### **`Get`** -```go -func Get(data []byte, keys ...string) (value []byte, dataType jsonparser.ValueType, offset int, err error) -``` -Receives data structure, and key path to extract value from. - -Returns: -* `value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error -* `dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null` -* `offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper. -* `err` - If the key is not found or any other parsing issue, it should return error. If key not found it also sets `dataType` to `NotExist` - -Accepts multiple keys to specify path to JSON value (in case of quering nested structures). -If no keys are provided it will try to extract the closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation. - -Note that keys can be an array indexes: `jsonparser.GetInt("person", "avatars", "[0]", "url")`, pretty cool, yeah? - -### **`GetString`** -```go -func GetString(data []byte, keys ...string) (val string, err error) -``` -Returns strings properly handing escaped and unicode characters. Note that this will cause additional memory allocations. - -### **`GetUnsafeString`** -If you need string in your app, and ready to sacrifice with support of escaped symbols in favor of speed. It returns string mapped to existing byte slice memory, without any allocations: -```go -s, _, := jsonparser.GetUnsafeString(data, "person", "name", "title") -switch s { - case 'CEO': - ... - case 'Engineer' - ... - ... -} -``` -Note that `unsafe` here means that your string will exist until GC will free underlying byte slice, for most of cases it means that you can use this string only in current context, and should not pass it anywhere externally: through channels or any other way. - - -### **`GetBoolean`**, **`GetInt`** and **`GetFloat`** -```go -func GetBoolean(data []byte, keys ...string) (val bool, err error) - -func GetFloat(data []byte, keys ...string) (val float64, err error) - -func GetInt(data []byte, keys ...string) (val int64, err error) -``` -If you know the key type, you can use the helpers above. -If key data type do not match, it will return error. - -### **`ArrayEach`** -```go -func ArrayEach(data []byte, cb func(value []byte, dataType jsonparser.ValueType, offset int, err error), keys ...string) -``` -Needed for iterating arrays, accepts a callback function with the same return arguments as `Get`. - -### **`ObjectEach`** -```go -func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error) -``` -Needed for iterating object, accepts a callback function. Example: -```go -var handler func([]byte, []byte, jsonparser.ValueType, int) error -handler = func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error { - //do stuff here -} -jsonparser.ObjectEach(myJson, handler) -``` - - -### **`EachKey`** -```go -func EachKey(data []byte, cb func(idx int, value []byte, dataType jsonparser.ValueType, err error), paths ...[]string) -``` -When you need to read multiple keys, and you do not afraid of low-level API `EachKey` is your friend. It read payload only single time, and calls callback function once path is found. 
For example when you call multiple times `Get`, it has to process payload multiple times, each time you call it. Depending on payload `EachKey` can be multiple times faster than `Get`. Path can use nested keys as well! - -```go -paths := [][]string{ - []string{"uuid"}, - []string{"tz"}, - []string{"ua"}, - []string{"st"}, -} -var data SmallPayload - -jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error){ - switch idx { - case 0: - data.Uuid, _ = value - case 1: - v, _ := jsonparser.ParseInt(value) - data.Tz = int(v) - case 2: - data.Ua, _ = value - case 3: - v, _ := jsonparser.ParseInt(value) - data.St = int(v) - } -}, paths...) -``` - -### **`Set`** -```go -func Set(data []byte, setValue []byte, keys ...string) (value []byte, err error) -``` -Receives existing data structure, key path to set, and value to set at that key. *This functionality is experimental.* - -Returns: -* `value` - Pointer to original data structure with updated or added key value. -* `err` - If any parsing issue, it should return error. - -Accepts multiple keys to specify path to JSON value (in case of updating or creating nested structures). - -Note that keys can be an array indexes: `jsonparser.Set(data, []byte("http://github.com"), "person", "avatars", "[0]", "url")` - -### **`Delete`** -```go -func Delete(data []byte, keys ...string) value []byte -``` -Receives existing data structure, and key path to delete. *This functionality is experimental.* - -Returns: -* `value` - Pointer to original data structure with key path deleted if it can be found. If there is no key path, then the whole data structure is deleted. - -Accepts multiple keys to specify path to JSON value (in case of updating or creating nested structures). - -Note that keys can be an array indexes: `jsonparser.Delete(data, "person", "avatars", "[0]", "url")` - - -## What makes it so fast? -* It does not rely on `encoding/json`, `reflection` or `interface{}`, the only real package dependency is `bytes`. -* Operates with JSON payload on byte level, providing you pointers to the original data structure: no memory allocation. -* No automatic type conversions, by default everything is a []byte, but it provides you value type, so you can convert by yourself (there is few helpers included). -* Does not parse full record, only keys you specified - - -## Benchmarks - -There are 3 benchmark types, trying to simulate real-life usage for small, medium and large JSON payloads. -For each metric, the lower value is better. Time/op is in nanoseconds. Values better than standard encoding/json marked as bold text. -Benchmarks run on standard Linode 1024 box. - -Compared libraries: -* https://golang.org/pkg/encoding/json -* https://github.com/Jeffail/gabs -* https://github.com/a8m/djson -* https://github.com/bitly/go-simplejson -* https://github.com/antonholmquist/jason -* https://github.com/mreiferson/go-ujson -* https://github.com/ugorji/go/codec -* https://github.com/pquerna/ffjson -* https://github.com/mailru/easyjson -* https://github.com/buger/jsonparser - -#### TLDR -If you want to skip next sections we have 2 winner: `jsonparser` and `easyjson`. -`jsonparser` is up to 10 times faster than standard `encoding/json` package (depending on payload size and usage), and almost infinitely (literally) better in memory consumption because it operates with data on byte level, and provide direct slice pointers. 
-`easyjson` wins in CPU in medium tests and frankly i'm impressed with this package: it is remarkable results considering that it is almost drop-in replacement for `encoding/json` (require some code generation). - -It's hard to fully compare `jsonparser` and `easyjson` (or `ffson`), they a true parsers and fully process record, unlike `jsonparser` which parse only keys you specified. - -If you searching for replacement of `encoding/json` while keeping structs, `easyjson` is an amazing choice. If you want to process dynamic JSON, have memory constrains, or more control over your data you should try `jsonparser`. - -`jsonparser` performance heavily depends on usage, and it works best when you do not need to process full record, only some keys. The more calls you need to make, the slower it will be, in contrast `easyjson` (or `ffjson`, `encoding/json`) parser record only 1 time, and then you can make as many calls as you want. - -With great power comes great responsibility! :) - - -#### Small payload - -Each test processes 190 bytes of http log as a JSON record. -It should read multiple fields. -https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_small_payload_test.go - -Library | time/op | bytes/op | allocs/op - ------ | ------- | -------- | ------- -encoding/json struct | 7879 | 880 | 18 -encoding/json interface{} | 8946 | 1521 | 38 -Jeffail/gabs | 10053 | 1649 | 46 -bitly/go-simplejson | 10128 | 2241 | 36 -antonholmquist/jason | 27152 | 7237 | 101 -github.com/ugorji/go/codec | 8806 | 2176 | 31 -mreiferson/go-ujson | **7008** | **1409** | 37 -a8m/djson | 3862 | 1249 | 30 -pquerna/ffjson | **3769** | **624** | **15** -mailru/easyjson | **2002** | **192** | **9** -buger/jsonparser | **1367** | **0** | **0** -buger/jsonparser (EachKey API) | **809** | **0** | **0** - -Winners are ffjson, easyjson and jsonparser, where jsonparser is up to 9.8x faster than encoding/json and 4.6x faster than ffjson, and slightly faster than easyjson. -If you look at memory allocation, jsonparser has no rivals, as it makes no data copy and operates with raw []byte structures and pointers to it. - -#### Medium payload - -Each test processes a 2.4kb JSON record (based on Clearbit API). -It should read multiple nested fields and 1 array. - -https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_medium_payload_test.go - -| Library | time/op | bytes/op | allocs/op | -| ------- | ------- | -------- | --------- | -| encoding/json struct | 57749 | 1336 | 29 | -| encoding/json interface{} | 79297 | 10627 | 215 | -| Jeffail/gabs | 83807 | 11202 | 235 | -| bitly/go-simplejson | 88187 | 17187 | 220 | -| antonholmquist/jason | 94099 | 19013 | 247 | -| github.com/ugorji/go/codec | 114719 | 6712 | 152 | -| mreiferson/go-ujson | **56972** | 11547 | 270 | -| a8m/djson | 28525 | 10196 | 198 | -| pquerna/ffjson | **20298** | **856** | **20** | -| mailru/easyjson | **10512** | **336** | **12** | -| buger/jsonparser | **15955** | **0** | **0** | -| buger/jsonparser (EachKey API) | **8916** | **0** | **0** | - -The difference between ffjson and jsonparser in CPU usage is smaller, while the memory consumption difference is growing. On the other hand `easyjson` shows remarkable performance for medium payload. - -`gabs`, `go-simplejson` and `jason` are based on encoding/json and map[string]interface{} and actually only helpers for unstructured JSON, their performance correlate with `encoding/json interface{}`, and they will skip next round. 
-`go-ujson` while have its own parser, shows same performance as `encoding/json`, also skips next round. Same situation with `ugorji/go/codec`, but it showed unexpectedly bad performance for complex payloads. - - -#### Large payload - -Each test processes a 24kb JSON record (based on Discourse API) -It should read 2 arrays, and for each item in array get a few fields. -Basically it means processing a full JSON file. - -https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_large_payload_test.go - -| Library | time/op | bytes/op | allocs/op | -| --- | --- | --- | --- | -| encoding/json struct | 748336 | 8272 | 307 | -| encoding/json interface{} | 1224271 | 215425 | 3395 | -| a8m/djson | 510082 | 213682 | 2845 | -| pquerna/ffjson | **312271** | **7792** | **298** | -| mailru/easyjson | **154186** | **6992** | **288** | -| buger/jsonparser | **85308** | **0** | **0** | - -`jsonparser` now is a winner, but do not forget that it is way more lightweight parser than `ffson` or `easyjson`, and they have to parser all the data, while `jsonparser` parse only what you need. All `ffjson`, `easysjon` and `jsonparser` have their own parsing code, and does not depend on `encoding/json` or `interface{}`, thats one of the reasons why they are so fast. `easyjson` also use a bit of `unsafe` package to reduce memory consuption (in theory it can lead to some unexpected GC issue, but i did not tested enough) - -Also last benchmark did not included `EachKey` test, because in this particular case we need to read lot of Array values, and using `ArrayEach` is more efficient. - -## Questions and support - -All bug-reports and suggestions should go though Github Issues. - -## Contributing - -1. Fork it -2. Create your feature branch (git checkout -b my-new-feature) -3. Commit your changes (git commit -am 'Added some feature') -4. Push to the branch (git push origin my-new-feature) -5. Create new Pull Request - -## Development - -All my development happens using Docker, and repo include some Make tasks to simplify development. 
- -* `make build` - builds docker image, usually can be called only once -* `make test` - run tests -* `make fmt` - run go fmt -* `make bench` - run benchmarks (if you need to run only single benchmark modify `BENCHMARK` variable in make file) -* `make profile` - runs benchmark and generate 3 files- `cpu.out`, `mem.mprof` and `benchmark.test` binary, which can be used for `go tool pprof` -* `make bash` - enter container (i use it for running `go tool pprof` above) diff --git a/vendor/github.com/buger/jsonparser/bytes.go b/vendor/github.com/buger/jsonparser/bytes.go deleted file mode 100644 index 0bb0ff39562..00000000000 --- a/vendor/github.com/buger/jsonparser/bytes.go +++ /dev/null @@ -1,47 +0,0 @@ -package jsonparser - -import ( - bio "bytes" -) - -// minInt64 '-9223372036854775808' is the smallest representable number in int64 -const minInt64 = `9223372036854775808` - -// About 2x faster then strconv.ParseInt because it only supports base 10, which is enough for JSON -func parseInt(bytes []byte) (v int64, ok bool, overflow bool) { - if len(bytes) == 0 { - return 0, false, false - } - - var neg bool = false - if bytes[0] == '-' { - neg = true - bytes = bytes[1:] - } - - var b int64 = 0 - for _, c := range bytes { - if c >= '0' && c <= '9' { - b = (10 * v) + int64(c-'0') - } else { - return 0, false, false - } - if overflow = (b < v); overflow { - break - } - v = b - } - - if overflow { - if neg && bio.Equal(bytes, []byte(minInt64)) { - return b, true, false - } - return 0, false, true - } - - if neg { - return -v, true, false - } else { - return v, true, false - } -} diff --git a/vendor/github.com/buger/jsonparser/bytes_safe.go b/vendor/github.com/buger/jsonparser/bytes_safe.go deleted file mode 100644 index ff16a4a1955..00000000000 --- a/vendor/github.com/buger/jsonparser/bytes_safe.go +++ /dev/null @@ -1,25 +0,0 @@ -// +build appengine appenginevm - -package jsonparser - -import ( - "strconv" -) - -// See fastbytes_unsafe.go for explanation on why *[]byte is used (signatures must be consistent with those in that file) - -func equalStr(b *[]byte, s string) bool { - return string(*b) == s -} - -func parseFloat(b *[]byte) (float64, error) { - return strconv.ParseFloat(string(*b), 64) -} - -func bytesToString(b *[]byte) string { - return string(*b) -} - -func StringToBytes(s string) []byte { - return []byte(s) -} diff --git a/vendor/github.com/buger/jsonparser/bytes_unsafe.go b/vendor/github.com/buger/jsonparser/bytes_unsafe.go deleted file mode 100644 index d3f523ddeb7..00000000000 --- a/vendor/github.com/buger/jsonparser/bytes_unsafe.go +++ /dev/null @@ -1,42 +0,0 @@ -// +build !appengine,!appenginevm - -package jsonparser - -import ( - "reflect" - "strconv" - "unsafe" -) - -// -// The reason for using *[]byte rather than []byte in parameters is an optimization. As of Go 1.6, -// the compiler cannot perfectly inline the function when using a non-pointer slice. That is, -// the non-pointer []byte parameter version is slower than if its function body is manually -// inlined, whereas the pointer []byte version is equally fast to the manually inlined -// version. Instruction count in assembly taken from "go tool compile" confirms this difference. -// -// TODO: Remove hack after Go 1.7 release -// -func equalStr(b *[]byte, s string) bool { - return *(*string)(unsafe.Pointer(b)) == s -} - -func parseFloat(b *[]byte) (float64, error) { - return strconv.ParseFloat(*(*string)(unsafe.Pointer(b)), 64) -} - -// A hack until issue golang/go#2632 is fixed. 
-// See: https://github.com/golang/go/issues/2632 -func bytesToString(b *[]byte) string { - return *(*string)(unsafe.Pointer(b)) -} - -func StringToBytes(s string) []byte { - sh := (*reflect.StringHeader)(unsafe.Pointer(&s)) - bh := reflect.SliceHeader{ - Data: sh.Data, - Len: sh.Len, - Cap: sh.Len, - } - return *(*[]byte)(unsafe.Pointer(&bh)) -} diff --git a/vendor/github.com/buger/jsonparser/escape.go b/vendor/github.com/buger/jsonparser/escape.go deleted file mode 100644 index 49669b94207..00000000000 --- a/vendor/github.com/buger/jsonparser/escape.go +++ /dev/null @@ -1,173 +0,0 @@ -package jsonparser - -import ( - "bytes" - "unicode/utf8" -) - -// JSON Unicode stuff: see https://tools.ietf.org/html/rfc7159#section-7 - -const supplementalPlanesOffset = 0x10000 -const highSurrogateOffset = 0xD800 -const lowSurrogateOffset = 0xDC00 - -const basicMultilingualPlaneReservedOffset = 0xDFFF -const basicMultilingualPlaneOffset = 0xFFFF - -func combineUTF16Surrogates(high, low rune) rune { - return supplementalPlanesOffset + (high-highSurrogateOffset)<<10 + (low - lowSurrogateOffset) -} - -const badHex = -1 - -func h2I(c byte) int { - switch { - case c >= '0' && c <= '9': - return int(c - '0') - case c >= 'A' && c <= 'F': - return int(c - 'A' + 10) - case c >= 'a' && c <= 'f': - return int(c - 'a' + 10) - } - return badHex -} - -// decodeSingleUnicodeEscape decodes a single \uXXXX escape sequence. The prefix \u is assumed to be present and -// is not checked. -// In JSON, these escapes can either come alone or as part of "UTF16 surrogate pairs" that must be handled together. -// This function only handles one; decodeUnicodeEscape handles this more complex case. -func decodeSingleUnicodeEscape(in []byte) (rune, bool) { - // We need at least 6 characters total - if len(in) < 6 { - return utf8.RuneError, false - } - - // Convert hex to decimal - h1, h2, h3, h4 := h2I(in[2]), h2I(in[3]), h2I(in[4]), h2I(in[5]) - if h1 == badHex || h2 == badHex || h3 == badHex || h4 == badHex { - return utf8.RuneError, false - } - - // Compose the hex digits - return rune(h1<<12 + h2<<8 + h3<<4 + h4), true -} - -// isUTF16EncodedRune checks if a rune is in the range for non-BMP characters, -// which is used to describe UTF16 chars. 
-// Source: https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane -func isUTF16EncodedRune(r rune) bool { - return highSurrogateOffset <= r && r <= basicMultilingualPlaneReservedOffset -} - -func decodeUnicodeEscape(in []byte) (rune, int) { - if r, ok := decodeSingleUnicodeEscape(in); !ok { - // Invalid Unicode escape - return utf8.RuneError, -1 - } else if r <= basicMultilingualPlaneOffset && !isUTF16EncodedRune(r) { - // Valid Unicode escape in Basic Multilingual Plane - return r, 6 - } else if r2, ok := decodeSingleUnicodeEscape(in[6:]); !ok { // Note: previous decodeSingleUnicodeEscape success guarantees at least 6 bytes remain - // UTF16 "high surrogate" without manditory valid following Unicode escape for the "low surrogate" - return utf8.RuneError, -1 - } else if r2 < lowSurrogateOffset { - // Invalid UTF16 "low surrogate" - return utf8.RuneError, -1 - } else { - // Valid UTF16 surrogate pair - return combineUTF16Surrogates(r, r2), 12 - } -} - -// backslashCharEscapeTable: when '\X' is found for some byte X, it is to be replaced with backslashCharEscapeTable[X] -var backslashCharEscapeTable = [...]byte{ - '"': '"', - '\\': '\\', - '/': '/', - 'b': '\b', - 'f': '\f', - 'n': '\n', - 'r': '\r', - 't': '\t', -} - -// unescapeToUTF8 unescapes the single escape sequence starting at 'in' into 'out' and returns -// how many characters were consumed from 'in' and emitted into 'out'. -// If a valid escape sequence does not appear as a prefix of 'in', (-1, -1) to signal the error. -func unescapeToUTF8(in, out []byte) (inLen int, outLen int) { - if len(in) < 2 || in[0] != '\\' { - // Invalid escape due to insufficient characters for any escape or no initial backslash - return -1, -1 - } - - // https://tools.ietf.org/html/rfc7159#section-7 - switch e := in[1]; e { - case '"', '\\', '/', 'b', 'f', 'n', 'r', 't': - // Valid basic 2-character escapes (use lookup table) - out[0] = backslashCharEscapeTable[e] - return 2, 1 - case 'u': - // Unicode escape - if r, inLen := decodeUnicodeEscape(in); inLen == -1 { - // Invalid Unicode escape - return -1, -1 - } else { - // Valid Unicode escape; re-encode as UTF8 - outLen := utf8.EncodeRune(out, r) - return inLen, outLen - } - } - - return -1, -1 -} - -// unescape unescapes the string contained in 'in' and returns it as a slice. -// If 'in' contains no escaped characters: -// Returns 'in'. -// Else, if 'out' is of sufficient capacity (guaranteed if cap(out) >= len(in)): -// 'out' is used to build the unescaped string and is returned with no extra allocation -// Else: -// A new slice is allocated and returned. 
-func Unescape(in, out []byte) ([]byte, error) { - firstBackslash := bytes.IndexByte(in, '\\') - if firstBackslash == -1 { - return in, nil - } - - // Get a buffer of sufficient size (allocate if needed) - if cap(out) < len(in) { - out = make([]byte, len(in)) - } else { - out = out[0:len(in)] - } - - // Copy the first sequence of unescaped bytes to the output and obtain a buffer pointer (subslice) - copy(out, in[:firstBackslash]) - in = in[firstBackslash:] - buf := out[firstBackslash:] - - for len(in) > 0 { - // Unescape the next escaped character - inLen, bufLen := unescapeToUTF8(in, buf) - if inLen == -1 { - return nil, MalformedStringEscapeError - } - - in = in[inLen:] - buf = buf[bufLen:] - - // Copy everything up until the next backslash - nextBackslash := bytes.IndexByte(in, '\\') - if nextBackslash == -1 { - copy(buf, in) - buf = buf[len(in):] - break - } else { - copy(buf, in[:nextBackslash]) - buf = buf[nextBackslash:] - in = in[nextBackslash:] - } - } - - // Trim the out buffer to the amount that was actually emitted - return out[:len(out)-len(buf)], nil -} diff --git a/vendor/github.com/buger/jsonparser/parser.go b/vendor/github.com/buger/jsonparser/parser.go deleted file mode 100644 index 5c25200906b..00000000000 --- a/vendor/github.com/buger/jsonparser/parser.go +++ /dev/null @@ -1,1211 +0,0 @@ -package jsonparser - -import ( - "bytes" - "errors" - "fmt" - "math" - "strconv" -) - -// Errors -var ( - KeyPathNotFoundError = errors.New("Key path not found") - UnknownValueTypeError = errors.New("Unknown value type") - MalformedJsonError = errors.New("Malformed JSON error") - MalformedStringError = errors.New("Value is string, but can't find closing '\"' symbol") - MalformedArrayError = errors.New("Value is array, but can't find closing ']' symbol") - MalformedObjectError = errors.New("Value looks like object, but can't find closing '}' symbol") - MalformedValueError = errors.New("Value looks like Number/Boolean/None, but can't find its end: ',' or '}' symbol") - OverflowIntegerError = errors.New("Value is number, but overflowed while parsing") - MalformedStringEscapeError = errors.New("Encountered an invalid escape sequence in a string") -) - -// How much stack space to allocate for unescaping JSON strings; if a string longer -// than this needs to be escaped, it will result in a heap allocation -const unescapeStackBufSize = 64 - -func tokenEnd(data []byte) int { - for i, c := range data { - switch c { - case ' ', '\n', '\r', '\t', ',', '}', ']': - return i - } - } - - return len(data) -} - -func findTokenStart(data []byte, token byte) int { - for i := len(data) - 1; i >= 0; i-- { - switch data[i] { - case token: - return i - case '[', '{': - return 0 - } - } - - return 0 -} - -func findKeyStart(data []byte, key string) (int, error) { - i := 0 - ln := len(data) - if ln > 0 && (data[0] == '{' || data[0] == '[') { - i = 1 - } - var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings - - if ku, err := Unescape(StringToBytes(key), stackbuf[:]); err == nil { - key = bytesToString(&ku) - } - - for i < ln { - switch data[i] { - case '"': - i++ - keyBegin := i - - strEnd, keyEscaped := stringEnd(data[i:]) - if strEnd == -1 { - break - } - i += strEnd - keyEnd := i - 1 - - valueOffset := nextToken(data[i:]) - if valueOffset == -1 { - break - } - - i += valueOffset - - // if string is a key, and key level match - k := data[keyBegin:keyEnd] - // for unescape: if there are no escape sequences, this is cheap; if there are, it is a - // 
bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize - if keyEscaped { - if ku, err := Unescape(k, stackbuf[:]); err != nil { - break - } else { - k = ku - } - } - - if data[i] == ':' && len(key) == len(k) && bytesToString(&k) == key { - return keyBegin - 1, nil - } - - case '[': - i = blockEnd(data[i:], data[i], ']') + i - case '{': - i = blockEnd(data[i:], data[i], '}') + i - } - i++ - } - - return -1, KeyPathNotFoundError -} - -func tokenStart(data []byte) int { - for i := len(data) - 1; i >= 0; i-- { - switch data[i] { - case '\n', '\r', '\t', ',', '{', '[': - return i - } - } - - return 0 -} - -// Find position of next character which is not whitespace -func nextToken(data []byte) int { - for i, c := range data { - switch c { - case ' ', '\n', '\r', '\t': - continue - default: - return i - } - } - - return -1 -} - -// Find position of last character which is not whitespace -func lastToken(data []byte) int { - for i := len(data) - 1; i >= 0; i-- { - switch data[i] { - case ' ', '\n', '\r', '\t': - continue - default: - return i - } - } - - return -1 -} - -// Tries to find the end of string -// Support if string contains escaped quote symbols. -func stringEnd(data []byte) (int, bool) { - escaped := false - for i, c := range data { - if c == '"' { - if !escaped { - return i + 1, false - } else { - j := i - 1 - for { - if j < 0 || data[j] != '\\' { - return i + 1, true // even number of backslashes - } - j-- - if j < 0 || data[j] != '\\' { - break // odd number of backslashes - } - j-- - - } - } - } else if c == '\\' { - escaped = true - } - } - - return -1, escaped -} - -// Find end of the data structure, array or object. -// For array openSym and closeSym will be '[' and ']', for object '{' and '}' -func blockEnd(data []byte, openSym byte, closeSym byte) int { - level := 0 - i := 0 - ln := len(data) - - for i < ln { - switch data[i] { - case '"': // If inside string, skip it - se, _ := stringEnd(data[i+1:]) - if se == -1 { - return -1 - } - i += se - case openSym: // If open symbol, increase level - level++ - case closeSym: // If close symbol, increase level - level-- - - // If we have returned to the original level, we're done - if level == 0 { - return i + 1 - } - } - i++ - } - - return -1 -} - -func searchKeys(data []byte, keys ...string) int { - keyLevel := 0 - level := 0 - i := 0 - ln := len(data) - lk := len(keys) - lastMatched := true - - if lk == 0 { - return 0 - } - - var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings - - for i < ln { - switch data[i] { - case '"': - i++ - keyBegin := i - - strEnd, keyEscaped := stringEnd(data[i:]) - if strEnd == -1 { - return -1 - } - i += strEnd - keyEnd := i - 1 - - valueOffset := nextToken(data[i:]) - if valueOffset == -1 { - return -1 - } - - i += valueOffset - - // if string is a key - if data[i] == ':' { - if level < 1 { - return -1 - } - - key := data[keyBegin:keyEnd] - - // for unescape: if there are no escape sequences, this is cheap; if there are, it is a - // bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize - var keyUnesc []byte - if !keyEscaped { - keyUnesc = key - } else if ku, err := Unescape(key, stackbuf[:]); err != nil { - return -1 - } else { - keyUnesc = ku - } - - if equalStr(&keyUnesc, keys[level-1]) { - lastMatched = true - - // if key level match - if keyLevel == level-1 { - keyLevel++ - // If we found all keys in path - if keyLevel == lk { - return i + 1 - } - } - } else { - lastMatched = 
false - } - } else { - i-- - } - case '{': - - // in case parent key is matched then only we will increase the level otherwise can directly - // can move to the end of this block - if !lastMatched { - end := blockEnd(data[i:], '{', '}') - i += end - 1 - } else{ - level++ - } - case '}': - level-- - if level == keyLevel { - keyLevel-- - } - case '[': - // If we want to get array element by index - if keyLevel == level && keys[level][0] == '[' { - aIdx, err := strconv.Atoi(keys[level][1 : len(keys[level])-1]) - if err != nil { - return -1 - } - var curIdx int - var valueFound []byte - var valueOffset int - var curI = i - ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) { - if curIdx == aIdx { - valueFound = value - valueOffset = offset - if dataType == String { - valueOffset = valueOffset - 2 - valueFound = data[curI+valueOffset : curI+valueOffset+len(value)+2] - } - } - curIdx += 1 - }) - - if valueFound == nil { - return -1 - } else { - subIndex := searchKeys(valueFound, keys[level+1:]...) - if subIndex < 0 { - return -1 - } - return i + valueOffset + subIndex - } - } else { - // Do not search for keys inside arrays - if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 { - return -1 - } else { - i += arraySkip - 1 - } - } - } - - i++ - } - - return -1 -} - -var bitwiseFlags []int64 - -func init() { - for i := 0; i < 63; i++ { - bitwiseFlags = append(bitwiseFlags, int64(math.Pow(2, float64(i)))) - } -} - -func sameTree(p1, p2 []string) bool { - minLen := len(p1) - if len(p2) < minLen { - minLen = len(p2) - } - - for pi_1, p_1 := range p1[:minLen] { - if p2[pi_1] != p_1 { - return false - } - } - - return true -} - -func EachKey(data []byte, cb func(int, []byte, ValueType, error), paths ...[]string) int { - var pathFlags int64 - var level, pathsMatched, i int - ln := len(data) - - var maxPath int - for _, p := range paths { - if len(p) > maxPath { - maxPath = len(p) - } - } - - var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings - pathsBuf := make([]string, maxPath) - - for i < ln { - switch data[i] { - case '"': - i++ - keyBegin := i - - strEnd, keyEscaped := stringEnd(data[i:]) - if strEnd == -1 { - return -1 - } - i += strEnd - - keyEnd := i - 1 - - valueOffset := nextToken(data[i:]) - if valueOffset == -1 { - return -1 - } - - i += valueOffset - - // if string is a key, and key level match - if data[i] == ':' { - match := -1 - key := data[keyBegin:keyEnd] - - // for unescape: if there are no escape sequences, this is cheap; if there are, it is a - // bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize - var keyUnesc []byte - if !keyEscaped { - keyUnesc = key - } else if ku, err := Unescape(key, stackbuf[:]); err != nil { - return -1 - } else { - keyUnesc = ku - } - - if maxPath >= level { - if level < 1 { - cb(-1, nil, Unknown, MalformedJsonError) - return -1 - } - - pathsBuf[level-1] = bytesToString(&keyUnesc) - for pi, p := range paths { - if len(p) != level || pathFlags&bitwiseFlags[pi+1] != 0 || !equalStr(&keyUnesc, p[level-1]) || !sameTree(p, pathsBuf[:level]) { - continue - } - - match = pi - - i++ - pathsMatched++ - pathFlags |= bitwiseFlags[pi+1] - - v, dt, of, e := Get(data[i:]) - cb(pi, v, dt, e) - - if of != -1 { - i += of - } - - if pathsMatched == len(paths) { - break - } - } - if pathsMatched == len(paths) { - return i - } - } - - if match == -1 { - tokenOffset := nextToken(data[i+1:]) - i += tokenOffset - - if data[i] == '{' { - blockSkip 
:= blockEnd(data[i:], '{', '}') - i += blockSkip + 1 - } - } - - if i < ln { - switch data[i] { - case '{', '}', '[', '"': - i-- - } - } - } else { - i-- - } - case '{': - level++ - case '}': - level-- - case '[': - var arrIdxFlags int64 - var pIdxFlags int64 - - if level < 0 { - cb(-1, nil, Unknown, MalformedJsonError) - return -1 - } - - for pi, p := range paths { - if len(p) < level+1 || pathFlags&bitwiseFlags[pi+1] != 0 || p[level][0] != '[' || !sameTree(p, pathsBuf[:level]) { - continue - } - - aIdx, _ := strconv.Atoi(p[level][1 : len(p[level])-1]) - arrIdxFlags |= bitwiseFlags[aIdx+1] - pIdxFlags |= bitwiseFlags[pi+1] - } - - if arrIdxFlags > 0 { - level++ - - var curIdx int - arrOff, _ := ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) { - if arrIdxFlags&bitwiseFlags[curIdx+1] != 0 { - for pi, p := range paths { - if pIdxFlags&bitwiseFlags[pi+1] != 0 { - aIdx, _ := strconv.Atoi(p[level-1][1 : len(p[level-1])-1]) - - if curIdx == aIdx { - of := searchKeys(value, p[level:]...) - - pathsMatched++ - pathFlags |= bitwiseFlags[pi+1] - - if of != -1 { - v, dt, _, e := Get(value[of:]) - cb(pi, v, dt, e) - } - } - } - } - } - - curIdx += 1 - }) - - if pathsMatched == len(paths) { - return i - } - - i += arrOff - 1 - } else { - // Do not search for keys inside arrays - if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 { - return -1 - } else { - i += arraySkip - 1 - } - } - case ']': - level-- - } - - i++ - } - - return -1 -} - -// Data types available in valid JSON data. -type ValueType int - -const ( - NotExist = ValueType(iota) - String - Number - Object - Array - Boolean - Null - Unknown -) - -func (vt ValueType) String() string { - switch vt { - case NotExist: - return "non-existent" - case String: - return "string" - case Number: - return "number" - case Object: - return "object" - case Array: - return "array" - case Boolean: - return "boolean" - case Null: - return "null" - default: - return "unknown" - } -} - -var ( - trueLiteral = []byte("true") - falseLiteral = []byte("false") - nullLiteral = []byte("null") -) - -func createInsertComponent(keys []string, setValue []byte, comma, object bool) []byte { - var buffer bytes.Buffer - isIndex := string(keys[0][0]) == "[" - if comma { - buffer.WriteString(",") - } - if isIndex { - buffer.WriteString("[") - } else { - if object { - buffer.WriteString("{") - } - buffer.WriteString("\"") - buffer.WriteString(keys[0]) - buffer.WriteString("\":") - } - - for i := 1; i < len(keys); i++ { - if string(keys[i][0]) == "[" { - buffer.WriteString("[") - } else { - buffer.WriteString("{\"") - buffer.WriteString(keys[i]) - buffer.WriteString("\":") - } - } - buffer.Write(setValue) - for i := len(keys) - 1; i > 0; i-- { - if string(keys[i][0]) == "[" { - buffer.WriteString("]") - } else { - buffer.WriteString("}") - } - } - if isIndex { - buffer.WriteString("]") - } - if object && !isIndex { - buffer.WriteString("}") - } - return buffer.Bytes() -} - -/* - -Del - Receives existing data structure, path to delete. - -Returns: -`data` - return modified data - -*/ -func Delete(data []byte, keys ...string) []byte { - lk := len(keys) - if lk == 0 { - return data[:0] - } - - array := false - if len(keys[lk-1]) > 0 && string(keys[lk-1][0]) == "[" { - array = true - } - - var startOffset, keyOffset int - endOffset := len(data) - var err error - if !array { - if len(keys) > 1 { - _, _, startOffset, endOffset, err = internalGet(data, keys[:lk-1]...) 
- if err == KeyPathNotFoundError { - // problem parsing the data - return data - } - } - - keyOffset, err = findKeyStart(data[startOffset:endOffset], keys[lk-1]) - if err == KeyPathNotFoundError { - // problem parsing the data - return data - } - keyOffset += startOffset - _, _, _, subEndOffset, _ := internalGet(data[startOffset:endOffset], keys[lk-1]) - endOffset = startOffset + subEndOffset - tokEnd := tokenEnd(data[endOffset:]) - tokStart := findTokenStart(data[:keyOffset], ","[0]) - - if data[endOffset+tokEnd] == ","[0] { - endOffset += tokEnd + 1 - } else if data[endOffset+tokEnd] == " "[0] && len(data) > endOffset+tokEnd+1 && data[endOffset+tokEnd+1] == ","[0] { - endOffset += tokEnd + 2 - } else if data[endOffset+tokEnd] == "}"[0] && data[tokStart] == ","[0] { - keyOffset = tokStart - } - } else { - _, _, keyOffset, endOffset, err = internalGet(data, keys...) - if err == KeyPathNotFoundError { - // problem parsing the data - return data - } - - tokEnd := tokenEnd(data[endOffset:]) - tokStart := findTokenStart(data[:keyOffset], ","[0]) - - if data[endOffset+tokEnd] == ","[0] { - endOffset += tokEnd + 1 - } else if data[endOffset+tokEnd] == "]"[0] && data[tokStart] == ","[0] { - keyOffset = tokStart - } - } - - // We need to remove remaining trailing comma if we delete las element in the object - prevTok := lastToken(data[:keyOffset]) - remainedValue := data[endOffset:] - - var newOffset int - if nextToken(remainedValue) > -1 && remainedValue[nextToken(remainedValue)] == '}' && data[prevTok] == ',' { - newOffset = prevTok - } else { - newOffset = prevTok + 1 - } - - data = append(data[:newOffset], data[endOffset:]...) - return data -} - -/* - -Set - Receives existing data structure, path to set, and data to set at that key. - -Returns: -`value` - modified byte array -`err` - On any parsing error - -*/ -func Set(data []byte, setValue []byte, keys ...string) (value []byte, err error) { - // ensure keys are set - if len(keys) == 0 { - return nil, KeyPathNotFoundError - } - - _, _, startOffset, endOffset, err := internalGet(data, keys...) - if err != nil { - if err != KeyPathNotFoundError { - // problem parsing the data - return nil, err - } - // full path doesnt exist - // does any subpath exist? - var depth int - for i := range keys { - _, _, start, end, sErr := internalGet(data, keys[:i+1]...) - if sErr != nil { - break - } else { - endOffset = end - startOffset = start - depth++ - } - } - comma := true - object := false - if endOffset == -1 { - firstToken := nextToken(data) - // We can't set a top-level key if data isn't an object - if len(data) == 0 || data[firstToken] != '{' { - return nil, KeyPathNotFoundError - } - // Don't need a comma if the input is an empty object - secondToken := firstToken + 1 + nextToken(data[firstToken+1:]) - if data[secondToken] == '}' { - comma = false - } - // Set the top level key at the end (accounting for any trailing whitespace) - // This assumes last token is valid like '}', could check and return error - endOffset = lastToken(data) - } - depthOffset := endOffset - if depth != 0 { - // if subpath is a non-empty object, add to it - if data[startOffset] == '{' && data[startOffset+1+nextToken(data[startOffset+1:])] != '}' { - depthOffset-- - startOffset = depthOffset - // otherwise, over-write it with a new object - } else { - comma = false - object = true - } - } else { - startOffset = depthOffset - } - value = append(data[:startOffset], append(createInsertComponent(keys[depth:], setValue, comma, object), data[depthOffset:]...)...) 
- } else { - // path currently exists - startComponent := data[:startOffset] - endComponent := data[endOffset:] - - value = make([]byte, len(startComponent)+len(endComponent)+len(setValue)) - newEndOffset := startOffset + len(setValue) - copy(value[0:startOffset], startComponent) - copy(value[startOffset:newEndOffset], setValue) - copy(value[newEndOffset:], endComponent) - } - return value, nil -} - -func getType(data []byte, offset int) ([]byte, ValueType, int, error) { - var dataType ValueType - endOffset := offset - - // if string value - if data[offset] == '"' { - dataType = String - if idx, _ := stringEnd(data[offset+1:]); idx != -1 { - endOffset += idx + 1 - } else { - return nil, dataType, offset, MalformedStringError - } - } else if data[offset] == '[' { // if array value - dataType = Array - // break label, for stopping nested loops - endOffset = blockEnd(data[offset:], '[', ']') - - if endOffset == -1 { - return nil, dataType, offset, MalformedArrayError - } - - endOffset += offset - } else if data[offset] == '{' { // if object value - dataType = Object - // break label, for stopping nested loops - endOffset = blockEnd(data[offset:], '{', '}') - - if endOffset == -1 { - return nil, dataType, offset, MalformedObjectError - } - - endOffset += offset - } else { - // Number, Boolean or None - end := tokenEnd(data[endOffset:]) - - if end == -1 { - return nil, dataType, offset, MalformedValueError - } - - value := data[offset : endOffset+end] - - switch data[offset] { - case 't', 'f': // true or false - if bytes.Equal(value, trueLiteral) || bytes.Equal(value, falseLiteral) { - dataType = Boolean - } else { - return nil, Unknown, offset, UnknownValueTypeError - } - case 'u', 'n': // undefined or null - if bytes.Equal(value, nullLiteral) { - dataType = Null - } else { - return nil, Unknown, offset, UnknownValueTypeError - } - case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-': - dataType = Number - default: - return nil, Unknown, offset, UnknownValueTypeError - } - - endOffset += end - } - return data[offset:endOffset], dataType, endOffset, nil -} - -/* -Get - Receives data structure, and key path to extract value from. - -Returns: -`value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error -`dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null` -`offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper. -`err` - If key not found or any other parsing issue it should return error. If key not found it also sets `dataType` to `NotExist` - -Accept multiple keys to specify path to JSON value (in case of quering nested structures). -If no keys provided it will try to extract closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation. -*/ -func Get(data []byte, keys ...string) (value []byte, dataType ValueType, offset int, err error) { - a, b, _, d, e := internalGet(data, keys...) 
- return a, b, d, e -} - -func internalGet(data []byte, keys ...string) (value []byte, dataType ValueType, offset, endOffset int, err error) { - if len(keys) > 0 { - if offset = searchKeys(data, keys...); offset == -1 { - return nil, NotExist, -1, -1, KeyPathNotFoundError - } - } - - // Go to closest value - nO := nextToken(data[offset:]) - if nO == -1 { - return nil, NotExist, offset, -1, MalformedJsonError - } - - offset += nO - value, dataType, endOffset, err = getType(data, offset) - if err != nil { - return value, dataType, offset, endOffset, err - } - - // Strip quotes from string values - if dataType == String { - value = value[1 : len(value)-1] - } - - return value, dataType, offset, endOffset, nil -} - -// ArrayEach is used when iterating arrays, accepts a callback function with the same return arguments as `Get`. -func ArrayEach(data []byte, cb func(value []byte, dataType ValueType, offset int, err error), keys ...string) (offset int, err error) { - if len(data) == 0 { - return -1, MalformedObjectError - } - - offset = 1 - - if len(keys) > 0 { - if offset = searchKeys(data, keys...); offset == -1 { - return offset, KeyPathNotFoundError - } - - // Go to closest value - nO := nextToken(data[offset:]) - if nO == -1 { - return offset, MalformedJsonError - } - - offset += nO - - if data[offset] != '[' { - return offset, MalformedArrayError - } - - offset++ - } - - nO := nextToken(data[offset:]) - if nO == -1 { - return offset, MalformedJsonError - } - - offset += nO - - if data[offset] == ']' { - return offset, nil - } - - for true { - v, t, o, e := Get(data[offset:]) - - if e != nil { - return offset, e - } - - if o == 0 { - break - } - - if t != NotExist { - cb(v, t, offset+o-len(v), e) - } - - if e != nil { - break - } - - offset += o - - skipToToken := nextToken(data[offset:]) - if skipToToken == -1 { - return offset, MalformedArrayError - } - offset += skipToToken - - if data[offset] == ']' { - break - } - - if data[offset] != ',' { - return offset, MalformedArrayError - } - - offset++ - } - - return offset, nil -} - -// ObjectEach iterates over the key-value pairs of a JSON object, invoking a given callback for each such entry -func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error) { - var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings - offset := 0 - - // Descend to the desired key, if requested - if len(keys) > 0 { - if off := searchKeys(data, keys...); off == -1 { - return KeyPathNotFoundError - } else { - offset = off - } - } - - // Validate and skip past opening brace - if off := nextToken(data[offset:]); off == -1 { - return MalformedObjectError - } else if offset += off; data[offset] != '{' { - return MalformedObjectError - } else { - offset++ - } - - // Skip to the first token inside the object, or stop if we find the ending brace - if off := nextToken(data[offset:]); off == -1 { - return MalformedJsonError - } else if offset += off; data[offset] == '}' { - return nil - } - - // Loop pre-condition: data[offset] points to what should be either the next entry's key, or the closing brace (if it's anything else, the JSON is malformed) - for offset < len(data) { - // Step 1: find the next key - var key []byte - - // Check what the the next token is: start of string, end of object, or something else (error) - switch data[offset] { - case '"': - offset++ // accept as string and skip opening quote - case '}': - return nil // we found the 
end of the object; stop and return success - default: - return MalformedObjectError - } - - // Find the end of the key string - var keyEscaped bool - if off, esc := stringEnd(data[offset:]); off == -1 { - return MalformedJsonError - } else { - key, keyEscaped = data[offset:offset+off-1], esc - offset += off - } - - // Unescape the string if needed - if keyEscaped { - if keyUnescaped, err := Unescape(key, stackbuf[:]); err != nil { - return MalformedStringEscapeError - } else { - key = keyUnescaped - } - } - - // Step 2: skip the colon - if off := nextToken(data[offset:]); off == -1 { - return MalformedJsonError - } else if offset += off; data[offset] != ':' { - return MalformedJsonError - } else { - offset++ - } - - // Step 3: find the associated value, then invoke the callback - if value, valueType, off, err := Get(data[offset:]); err != nil { - return err - } else if err := callback(key, value, valueType, offset+off); err != nil { // Invoke the callback here! - return err - } else { - offset += off - } - - // Step 4: skip over the next comma to the following token, or stop if we hit the ending brace - if off := nextToken(data[offset:]); off == -1 { - return MalformedArrayError - } else { - offset += off - switch data[offset] { - case '}': - return nil // Stop if we hit the close brace - case ',': - offset++ // Ignore the comma - default: - return MalformedObjectError - } - } - - // Skip to the next token after the comma - if off := nextToken(data[offset:]); off == -1 { - return MalformedArrayError - } else { - offset += off - } - } - - return MalformedObjectError // we shouldn't get here; it's expected that we will return via finding the ending brace -} - -// GetUnsafeString returns the value retrieved by `Get`, use creates string without memory allocation by mapping string to slice memory. It does not handle escape symbols. -func GetUnsafeString(data []byte, keys ...string) (val string, err error) { - v, _, _, e := Get(data, keys...) - - if e != nil { - return "", e - } - - return bytesToString(&v), nil -} - -// GetString returns the value retrieved by `Get`, cast to a string if possible, trying to properly handle escape and utf8 symbols -// If key data type do not match, it will return an error. -func GetString(data []byte, keys ...string) (val string, err error) { - v, t, _, e := Get(data, keys...) - - if e != nil { - return "", e - } - - if t != String { - return "", fmt.Errorf("Value is not a string: %s", string(v)) - } - - // If no escapes return raw conten - if bytes.IndexByte(v, '\\') == -1 { - return string(v), nil - } - - return ParseString(v) -} - -// GetFloat returns the value retrieved by `Get`, cast to a float64 if possible. -// The offset is the same as in `Get`. -// If key data type do not match, it will return an error. -func GetFloat(data []byte, keys ...string) (val float64, err error) { - v, t, _, e := Get(data, keys...) - - if e != nil { - return 0, e - } - - if t != Number { - return 0, fmt.Errorf("Value is not a number: %s", string(v)) - } - - return ParseFloat(v) -} - -// GetInt returns the value retrieved by `Get`, cast to a int64 if possible. -// If key data type do not match, it will return an error. -func GetInt(data []byte, keys ...string) (val int64, err error) { - v, t, _, e := Get(data, keys...) - - if e != nil { - return 0, e - } - - if t != Number { - return 0, fmt.Errorf("Value is not a number: %s", string(v)) - } - - return ParseInt(v) -} - -// GetBoolean returns the value retrieved by `Get`, cast to a bool if possible. 
-// The offset is the same as in `Get`. -// If key data type do not match, it will return error. -func GetBoolean(data []byte, keys ...string) (val bool, err error) { - v, t, _, e := Get(data, keys...) - - if e != nil { - return false, e - } - - if t != Boolean { - return false, fmt.Errorf("Value is not a boolean: %s", string(v)) - } - - return ParseBoolean(v) -} - -// ParseBoolean parses a Boolean ValueType into a Go bool (not particularly useful, but here for completeness) -func ParseBoolean(b []byte) (bool, error) { - switch { - case bytes.Equal(b, trueLiteral): - return true, nil - case bytes.Equal(b, falseLiteral): - return false, nil - default: - return false, MalformedValueError - } -} - -// ParseString parses a String ValueType into a Go string (the main parsing work is unescaping the JSON string) -func ParseString(b []byte) (string, error) { - var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings - if bU, err := Unescape(b, stackbuf[:]); err != nil { - return "", MalformedValueError - } else { - return string(bU), nil - } -} - -// ParseNumber parses a Number ValueType into a Go float64 -func ParseFloat(b []byte) (float64, error) { - if v, err := parseFloat(&b); err != nil { - return 0, MalformedValueError - } else { - return v, nil - } -} - -// ParseInt parses a Number ValueType into a Go int64 -func ParseInt(b []byte) (int64, error) { - if v, ok, overflow := parseInt(b); !ok { - if overflow { - return 0, OverflowIntegerError - } - return 0, MalformedValueError - } else { - return v, nil - } -} diff --git a/vendor/github.com/cenk/backoff/.gitignore b/vendor/github.com/cenk/backoff/.gitignore deleted file mode 100644 index 00268614f04..00000000000 --- a/vendor/github.com/cenk/backoff/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/vendor/github.com/cenk/backoff/.travis.yml b/vendor/github.com/cenk/backoff/.travis.yml deleted file mode 100644 index 47a6a46ec2a..00000000000 --- a/vendor/github.com/cenk/backoff/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: go -go: - - 1.7 - - 1.x - - tip -before_install: - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover -script: - - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/cenk/backoff/LICENSE b/vendor/github.com/cenk/backoff/LICENSE deleted file mode 100644 index 89b81799655..00000000000 --- a/vendor/github.com/cenk/backoff/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Cenk Altı - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/cenk/backoff/README.md b/vendor/github.com/cenk/backoff/README.md deleted file mode 100644 index 55ebc98fc25..00000000000 --- a/vendor/github.com/cenk/backoff/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# Exponential Backoff [![GoDoc][godoc image]][godoc] [![Build Status][travis image]][travis] [![Coverage Status][coveralls image]][coveralls] - -This is a Go port of the exponential backoff algorithm from [Google's HTTP Client Library for Java][google-http-java-client]. - -[Exponential backoff][exponential backoff wiki] -is an algorithm that uses feedback to multiplicatively decrease the rate of some process, -in order to gradually find an acceptable rate. -The retries exponentially increase and stop increasing when a certain threshold is met. - -## Usage - -See https://godoc.org/github.com/cenkalti/backoff#pkg-examples - -## Contributing - -* I would like to keep this library as small as possible. -* Please don't send a PR without opening an issue and discussing it first. -* If proposed change is not a common use case, I will probably not accept it. - -[godoc]: https://godoc.org/github.com/cenkalti/backoff -[godoc image]: https://godoc.org/github.com/cenkalti/backoff?status.png -[travis]: https://travis-ci.org/cenkalti/backoff -[travis image]: https://travis-ci.org/cenkalti/backoff.png?branch=master -[coveralls]: https://coveralls.io/github/cenkalti/backoff?branch=master -[coveralls image]: https://coveralls.io/repos/github/cenkalti/backoff/badge.svg?branch=master - -[google-http-java-client]: https://github.com/google/google-http-java-client/blob/da1aa993e90285ec18579f1553339b00e19b3ab5/google-http-client/src/main/java/com/google/api/client/util/ExponentialBackOff.java -[exponential backoff wiki]: http://en.wikipedia.org/wiki/Exponential_backoff - -[advanced example]: https://godoc.org/github.com/cenkalti/backoff#example_ diff --git a/vendor/github.com/cenk/backoff/backoff.go b/vendor/github.com/cenk/backoff/backoff.go deleted file mode 100644 index 3676ee405d8..00000000000 --- a/vendor/github.com/cenk/backoff/backoff.go +++ /dev/null @@ -1,66 +0,0 @@ -// Package backoff implements backoff algorithms for retrying operations. -// -// Use Retry function for retrying operations that may fail. -// If Retry does not meet your needs, -// copy/paste the function into your project and modify as you wish. -// -// There is also Ticker type similar to time.Ticker. -// You can use it if you need to work with channels. -// -// See Examples section below for usage examples. -package backoff - -import "time" - -// BackOff is a backoff policy for retrying an operation. -type BackOff interface { - // NextBackOff returns the duration to wait before retrying the operation, - // or backoff. Stop to indicate that no more retries should be made. - // - // Example usage: - // - // duration := backoff.NextBackOff(); - // if (duration == backoff.Stop) { - // // Do not retry operation. - // } else { - // // Sleep for duration and retry operation. - // } - // - NextBackOff() time.Duration - - // Reset to initial state. 
- Reset() -} - -// Stop indicates that no more retries should be made for use in NextBackOff(). -const Stop time.Duration = -1 - -// ZeroBackOff is a fixed backoff policy whose backoff time is always zero, -// meaning that the operation is retried immediately without waiting, indefinitely. -type ZeroBackOff struct{} - -func (b *ZeroBackOff) Reset() {} - -func (b *ZeroBackOff) NextBackOff() time.Duration { return 0 } - -// StopBackOff is a fixed backoff policy that always returns backoff.Stop for -// NextBackOff(), meaning that the operation should never be retried. -type StopBackOff struct{} - -func (b *StopBackOff) Reset() {} - -func (b *StopBackOff) NextBackOff() time.Duration { return Stop } - -// ConstantBackOff is a backoff policy that always returns the same backoff delay. -// This is in contrast to an exponential backoff policy, -// which returns a delay that grows longer as you call NextBackOff() over and over again. -type ConstantBackOff struct { - Interval time.Duration -} - -func (b *ConstantBackOff) Reset() {} -func (b *ConstantBackOff) NextBackOff() time.Duration { return b.Interval } - -func NewConstantBackOff(d time.Duration) *ConstantBackOff { - return &ConstantBackOff{Interval: d} -} diff --git a/vendor/github.com/cenk/backoff/context.go b/vendor/github.com/cenk/backoff/context.go deleted file mode 100644 index 7706faa2b60..00000000000 --- a/vendor/github.com/cenk/backoff/context.go +++ /dev/null @@ -1,63 +0,0 @@ -package backoff - -import ( - "context" - "time" -) - -// BackOffContext is a backoff policy that stops retrying after the context -// is canceled. -type BackOffContext interface { - BackOff - Context() context.Context -} - -type backOffContext struct { - BackOff - ctx context.Context -} - -// WithContext returns a BackOffContext with context ctx -// -// ctx must not be nil -func WithContext(b BackOff, ctx context.Context) BackOffContext { - if ctx == nil { - panic("nil context") - } - - if b, ok := b.(*backOffContext); ok { - return &backOffContext{ - BackOff: b.BackOff, - ctx: ctx, - } - } - - return &backOffContext{ - BackOff: b, - ctx: ctx, - } -} - -func ensureContext(b BackOff) BackOffContext { - if cb, ok := b.(BackOffContext); ok { - return cb - } - return WithContext(b, context.Background()) -} - -func (b *backOffContext) Context() context.Context { - return b.ctx -} - -func (b *backOffContext) NextBackOff() time.Duration { - select { - case <-b.ctx.Done(): - return Stop - default: - } - next := b.BackOff.NextBackOff() - if deadline, ok := b.ctx.Deadline(); ok && deadline.Sub(time.Now()) < next { - return Stop - } - return next -} diff --git a/vendor/github.com/cenk/backoff/exponential.go b/vendor/github.com/cenk/backoff/exponential.go deleted file mode 100644 index a031a659799..00000000000 --- a/vendor/github.com/cenk/backoff/exponential.go +++ /dev/null @@ -1,153 +0,0 @@ -package backoff - -import ( - "math/rand" - "time" -) - -/* -ExponentialBackOff is a backoff implementation that increases the backoff -period for each retry attempt using a randomization function that grows exponentially. - -NextBackOff() is calculated using the following formula: - - randomized interval = - RetryInterval * (random value in range [1 - RandomizationFactor, 1 + RandomizationFactor]) - -In other words NextBackOff() will range between the randomization factor -percentage below and above the retry interval. 
- -For example, given the following parameters: - - RetryInterval = 2 - RandomizationFactor = 0.5 - Multiplier = 2 - -the actual backoff period used in the next retry attempt will range between 1 and 3 seconds, -multiplied by the exponential, that is, between 2 and 6 seconds. - -Note: MaxInterval caps the RetryInterval and not the randomized interval. - -If the time elapsed since an ExponentialBackOff instance is created goes past the -MaxElapsedTime, then the method NextBackOff() starts returning backoff.Stop. - -The elapsed time can be reset by calling Reset(). - -Example: Given the following default arguments, for 10 tries the sequence will be, -and assuming we go over the MaxElapsedTime on the 10th try: - - Request # RetryInterval (seconds) Randomized Interval (seconds) - - 1 0.5 [0.25, 0.75] - 2 0.75 [0.375, 1.125] - 3 1.125 [0.562, 1.687] - 4 1.687 [0.8435, 2.53] - 5 2.53 [1.265, 3.795] - 6 3.795 [1.897, 5.692] - 7 5.692 [2.846, 8.538] - 8 8.538 [4.269, 12.807] - 9 12.807 [6.403, 19.210] - 10 19.210 backoff.Stop - -Note: Implementation is not thread-safe. -*/ -type ExponentialBackOff struct { - InitialInterval time.Duration - RandomizationFactor float64 - Multiplier float64 - MaxInterval time.Duration - // After MaxElapsedTime the ExponentialBackOff stops. - // It never stops if MaxElapsedTime == 0. - MaxElapsedTime time.Duration - Clock Clock - - currentInterval time.Duration - startTime time.Time -} - -// Clock is an interface that returns current time for BackOff. -type Clock interface { - Now() time.Time -} - -// Default values for ExponentialBackOff. -const ( - DefaultInitialInterval = 500 * time.Millisecond - DefaultRandomizationFactor = 0.5 - DefaultMultiplier = 1.5 - DefaultMaxInterval = 60 * time.Second - DefaultMaxElapsedTime = 15 * time.Minute -) - -// NewExponentialBackOff creates an instance of ExponentialBackOff using default values. -func NewExponentialBackOff() *ExponentialBackOff { - b := &ExponentialBackOff{ - InitialInterval: DefaultInitialInterval, - RandomizationFactor: DefaultRandomizationFactor, - Multiplier: DefaultMultiplier, - MaxInterval: DefaultMaxInterval, - MaxElapsedTime: DefaultMaxElapsedTime, - Clock: SystemClock, - } - b.Reset() - return b -} - -type systemClock struct{} - -func (t systemClock) Now() time.Time { - return time.Now() -} - -// SystemClock implements Clock interface that uses time.Now(). -var SystemClock = systemClock{} - -// Reset the interval back to the initial retry interval and restarts the timer. -func (b *ExponentialBackOff) Reset() { - b.currentInterval = b.InitialInterval - b.startTime = b.Clock.Now() -} - -// NextBackOff calculates the next backoff interval using the formula: -// Randomized interval = RetryInterval +/- (RandomizationFactor * RetryInterval) -func (b *ExponentialBackOff) NextBackOff() time.Duration { - // Make sure we have not gone over the maximum elapsed time. - if b.MaxElapsedTime != 0 && b.GetElapsedTime() > b.MaxElapsedTime { - return Stop - } - defer b.incrementCurrentInterval() - return getRandomValueFromInterval(b.RandomizationFactor, rand.Float64(), b.currentInterval) -} - -// GetElapsedTime returns the elapsed time since an ExponentialBackOff instance -// is created and is reset when Reset() is called. -// -// The elapsed time is computed using time.Now().UnixNano(). It is -// safe to call even while the backoff policy is used by a running -// ticker. 
-func (b *ExponentialBackOff) GetElapsedTime() time.Duration { - return b.Clock.Now().Sub(b.startTime) -} - -// Increments the current interval by multiplying it with the multiplier. -func (b *ExponentialBackOff) incrementCurrentInterval() { - // Check for overflow, if overflow is detected set the current interval to the max interval. - if float64(b.currentInterval) >= float64(b.MaxInterval)/b.Multiplier { - b.currentInterval = b.MaxInterval - } else { - b.currentInterval = time.Duration(float64(b.currentInterval) * b.Multiplier) - } -} - -// Returns a random value from the following interval: -// [randomizationFactor * currentInterval, randomizationFactor * currentInterval]. -func getRandomValueFromInterval(randomizationFactor, random float64, currentInterval time.Duration) time.Duration { - var delta = randomizationFactor * float64(currentInterval) - var minInterval = float64(currentInterval) - delta - var maxInterval = float64(currentInterval) + delta - - // Get a random value from the range [minInterval, maxInterval]. - // The formula used below has a +1 because if the minInterval is 1 and the maxInterval is 3 then - // we want a 33% chance for selecting either 1, 2 or 3. - return time.Duration(minInterval + (random * (maxInterval - minInterval + 1))) -} diff --git a/vendor/github.com/cenk/backoff/retry.go b/vendor/github.com/cenk/backoff/retry.go deleted file mode 100644 index e936a506f84..00000000000 --- a/vendor/github.com/cenk/backoff/retry.go +++ /dev/null @@ -1,82 +0,0 @@ -package backoff - -import "time" - -// An Operation is executing by Retry() or RetryNotify(). -// The operation will be retried using a backoff policy if it returns an error. -type Operation func() error - -// Notify is a notify-on-error function. It receives an operation error and -// backoff delay if the operation failed (with an error). -// -// NOTE that if the backoff policy stated to stop retrying, -// the notify function isn't called. -type Notify func(error, time.Duration) - -// Retry the operation o until it does not return error or BackOff stops. -// o is guaranteed to be run at least once. -// -// If o returns a *PermanentError, the operation is not retried, and the -// wrapped error is returned. -// -// Retry sleeps the goroutine for the duration returned by BackOff after a -// failed operation returns. -func Retry(o Operation, b BackOff) error { return RetryNotify(o, b, nil) } - -// RetryNotify calls notify function with the error and wait duration -// for each failed attempt before sleep. -func RetryNotify(operation Operation, b BackOff, notify Notify) error { - var err error - var next time.Duration - var t *time.Timer - - cb := ensureContext(b) - - b.Reset() - for { - if err = operation(); err == nil { - return nil - } - - if permanent, ok := err.(*PermanentError); ok { - return permanent.Err - } - - if next = cb.NextBackOff(); next == Stop { - return err - } - - if notify != nil { - notify(err, next) - } - - if t == nil { - t = time.NewTimer(next) - defer t.Stop() - } else { - t.Reset(next) - } - - select { - case <-cb.Context().Done(): - return err - case <-t.C: - } - } -} - -// PermanentError signals that the operation should not be retried. -type PermanentError struct { - Err error -} - -func (e *PermanentError) Error() string { - return e.Err.Error() -} - -// Permanent wraps the given err in a *PermanentError. 
-func Permanent(err error) *PermanentError { - return &PermanentError{ - Err: err, - } -} diff --git a/vendor/github.com/cenk/backoff/ticker.go b/vendor/github.com/cenk/backoff/ticker.go deleted file mode 100644 index e41084b0eff..00000000000 --- a/vendor/github.com/cenk/backoff/ticker.go +++ /dev/null @@ -1,82 +0,0 @@ -package backoff - -import ( - "sync" - "time" -) - -// Ticker holds a channel that delivers `ticks' of a clock at times reported by a BackOff. -// -// Ticks will continue to arrive when the previous operation is still running, -// so operations that take a while to fail could run in quick succession. -type Ticker struct { - C <-chan time.Time - c chan time.Time - b BackOffContext - stop chan struct{} - stopOnce sync.Once -} - -// NewTicker returns a new Ticker containing a channel that will send -// the time at times specified by the BackOff argument. Ticker is -// guaranteed to tick at least once. The channel is closed when Stop -// method is called or BackOff stops. It is not safe to manipulate the -// provided backoff policy (notably calling NextBackOff or Reset) -// while the ticker is running. -func NewTicker(b BackOff) *Ticker { - c := make(chan time.Time) - t := &Ticker{ - C: c, - c: c, - b: ensureContext(b), - stop: make(chan struct{}), - } - t.b.Reset() - go t.run() - return t -} - -// Stop turns off a ticker. After Stop, no more ticks will be sent. -func (t *Ticker) Stop() { - t.stopOnce.Do(func() { close(t.stop) }) -} - -func (t *Ticker) run() { - c := t.c - defer close(c) - - // Ticker is guaranteed to tick at least once. - afterC := t.send(time.Now()) - - for { - if afterC == nil { - return - } - - select { - case tick := <-afterC: - afterC = t.send(tick) - case <-t.stop: - t.c = nil // Prevent future ticks from being sent to the channel. - return - case <-t.b.Context().Done(): - return - } - } -} - -func (t *Ticker) send(tick time.Time) <-chan time.Time { - select { - case t.c <- tick: - case <-t.stop: - return nil - } - - next := t.b.NextBackOff() - if next == Stop { - t.Stop() - return nil - } - - return time.After(next) -} diff --git a/vendor/github.com/cenk/backoff/tries.go b/vendor/github.com/cenk/backoff/tries.go deleted file mode 100644 index cfeefd9b764..00000000000 --- a/vendor/github.com/cenk/backoff/tries.go +++ /dev/null @@ -1,35 +0,0 @@ -package backoff - -import "time" - -/* -WithMaxRetries creates a wrapper around another BackOff, which will -return Stop if NextBackOff() has been called too many times since -the last time Reset() was called - -Note: Implementation is not thread-safe. 
-*/ -func WithMaxRetries(b BackOff, max uint64) BackOff { - return &backOffTries{delegate: b, maxTries: max} -} - -type backOffTries struct { - delegate BackOff - maxTries uint64 - numTries uint64 -} - -func (b *backOffTries) NextBackOff() time.Duration { - if b.maxTries > 0 { - if b.maxTries <= b.numTries { - return Stop - } - b.numTries++ - } - return b.delegate.NextBackOff() -} - -func (b *backOffTries) Reset() { - b.numTries = 0 - b.delegate.Reset() -} diff --git a/vendor/github.com/cenkalti/backoff/v4/.gitignore b/vendor/github.com/cenkalti/backoff/v4/.gitignore deleted file mode 100644 index 00268614f04..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/vendor/github.com/cenkalti/backoff/v4/.travis.yml b/vendor/github.com/cenkalti/backoff/v4/.travis.yml deleted file mode 100644 index 871150c4672..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: go -go: - - 1.12 - - 1.x - - tip -before_install: - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover -script: - - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/cenkalti/backoff/v4/LICENSE b/vendor/github.com/cenkalti/backoff/v4/LICENSE deleted file mode 100644 index 89b81799655..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Cenk Altı - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/cenkalti/backoff/v4/README.md b/vendor/github.com/cenkalti/backoff/v4/README.md deleted file mode 100644 index cabfc9c7017..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Exponential Backoff [![GoDoc][godoc image]][godoc] [![Build Status][travis image]][travis] [![Coverage Status][coveralls image]][coveralls] - -This is a Go port of the exponential backoff algorithm from [Google's HTTP Client Library for Java][google-http-java-client]. - -[Exponential backoff][exponential backoff wiki] -is an algorithm that uses feedback to multiplicatively decrease the rate of some process, -in order to gradually find an acceptable rate. 
-The retries exponentially increase and stop increasing when a certain threshold is met. - -## Usage - -Import path is `github.com/cenkalti/backoff/v4`. Please note the version part at the end. - -godoc.org does not support modules yet, -so you can use https://godoc.org/gopkg.in/cenkalti/backoff.v4 to view the documentation. - -## Contributing - -* I would like to keep this library as small as possible. -* Please don't send a PR without opening an issue and discussing it first. -* If proposed change is not a common use case, I will probably not accept it. - -[godoc]: https://godoc.org/github.com/cenkalti/backoff -[godoc image]: https://godoc.org/github.com/cenkalti/backoff?status.png -[travis]: https://travis-ci.org/cenkalti/backoff -[travis image]: https://travis-ci.org/cenkalti/backoff.png?branch=master -[coveralls]: https://coveralls.io/github/cenkalti/backoff?branch=master -[coveralls image]: https://coveralls.io/repos/github/cenkalti/backoff/badge.svg?branch=master - -[google-http-java-client]: https://github.com/google/google-http-java-client/blob/da1aa993e90285ec18579f1553339b00e19b3ab5/google-http-client/src/main/java/com/google/api/client/util/ExponentialBackOff.java -[exponential backoff wiki]: http://en.wikipedia.org/wiki/Exponential_backoff - -[advanced example]: https://godoc.org/github.com/cenkalti/backoff#example_ diff --git a/vendor/github.com/cenkalti/backoff/v4/backoff.go b/vendor/github.com/cenkalti/backoff/v4/backoff.go deleted file mode 100644 index 3676ee405d8..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/backoff.go +++ /dev/null @@ -1,66 +0,0 @@ -// Package backoff implements backoff algorithms for retrying operations. -// -// Use Retry function for retrying operations that may fail. -// If Retry does not meet your needs, -// copy/paste the function into your project and modify as you wish. -// -// There is also Ticker type similar to time.Ticker. -// You can use it if you need to work with channels. -// -// See Examples section below for usage examples. -package backoff - -import "time" - -// BackOff is a backoff policy for retrying an operation. -type BackOff interface { - // NextBackOff returns the duration to wait before retrying the operation, - // or backoff. Stop to indicate that no more retries should be made. - // - // Example usage: - // - // duration := backoff.NextBackOff(); - // if (duration == backoff.Stop) { - // // Do not retry operation. - // } else { - // // Sleep for duration and retry operation. - // } - // - NextBackOff() time.Duration - - // Reset to initial state. - Reset() -} - -// Stop indicates that no more retries should be made for use in NextBackOff(). -const Stop time.Duration = -1 - -// ZeroBackOff is a fixed backoff policy whose backoff time is always zero, -// meaning that the operation is retried immediately without waiting, indefinitely. -type ZeroBackOff struct{} - -func (b *ZeroBackOff) Reset() {} - -func (b *ZeroBackOff) NextBackOff() time.Duration { return 0 } - -// StopBackOff is a fixed backoff policy that always returns backoff.Stop for -// NextBackOff(), meaning that the operation should never be retried. -type StopBackOff struct{} - -func (b *StopBackOff) Reset() {} - -func (b *StopBackOff) NextBackOff() time.Duration { return Stop } - -// ConstantBackOff is a backoff policy that always returns the same backoff delay. -// This is in contrast to an exponential backoff policy, -// which returns a delay that grows longer as you call NextBackOff() over and over again. 
-type ConstantBackOff struct { - Interval time.Duration -} - -func (b *ConstantBackOff) Reset() {} -func (b *ConstantBackOff) NextBackOff() time.Duration { return b.Interval } - -func NewConstantBackOff(d time.Duration) *ConstantBackOff { - return &ConstantBackOff{Interval: d} -} diff --git a/vendor/github.com/cenkalti/backoff/v4/context.go b/vendor/github.com/cenkalti/backoff/v4/context.go deleted file mode 100644 index fcff86c1b3d..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/context.go +++ /dev/null @@ -1,66 +0,0 @@ -package backoff - -import ( - "context" - "time" -) - -// BackOffContext is a backoff policy that stops retrying after the context -// is canceled. -type BackOffContext interface { // nolint: golint - BackOff - Context() context.Context -} - -type backOffContext struct { - BackOff - ctx context.Context -} - -// WithContext returns a BackOffContext with context ctx -// -// ctx must not be nil -func WithContext(b BackOff, ctx context.Context) BackOffContext { // nolint: golint - if ctx == nil { - panic("nil context") - } - - if b, ok := b.(*backOffContext); ok { - return &backOffContext{ - BackOff: b.BackOff, - ctx: ctx, - } - } - - return &backOffContext{ - BackOff: b, - ctx: ctx, - } -} - -func getContext(b BackOff) context.Context { - if cb, ok := b.(BackOffContext); ok { - return cb.Context() - } - if tb, ok := b.(*backOffTries); ok { - return getContext(tb.delegate) - } - return context.Background() -} - -func (b *backOffContext) Context() context.Context { - return b.ctx -} - -func (b *backOffContext) NextBackOff() time.Duration { - select { - case <-b.ctx.Done(): - return Stop - default: - } - next := b.BackOff.NextBackOff() - if deadline, ok := b.ctx.Deadline(); ok && deadline.Sub(time.Now()) < next { // nolint: gosimple - return Stop - } - return next -} diff --git a/vendor/github.com/cenkalti/backoff/v4/exponential.go b/vendor/github.com/cenkalti/backoff/v4/exponential.go deleted file mode 100644 index 3d3453215bb..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/exponential.go +++ /dev/null @@ -1,158 +0,0 @@ -package backoff - -import ( - "math/rand" - "time" -) - -/* -ExponentialBackOff is a backoff implementation that increases the backoff -period for each retry attempt using a randomization function that grows exponentially. - -NextBackOff() is calculated using the following formula: - - randomized interval = - RetryInterval * (random value in range [1 - RandomizationFactor, 1 + RandomizationFactor]) - -In other words NextBackOff() will range between the randomization factor -percentage below and above the retry interval. - -For example, given the following parameters: - - RetryInterval = 2 - RandomizationFactor = 0.5 - Multiplier = 2 - -the actual backoff period used in the next retry attempt will range between 1 and 3 seconds, -multiplied by the exponential, that is, between 2 and 6 seconds. - -Note: MaxInterval caps the RetryInterval and not the randomized interval. - -If the time elapsed since an ExponentialBackOff instance is created goes past the -MaxElapsedTime, then the method NextBackOff() starts returning backoff.Stop. - -The elapsed time can be reset by calling Reset(). 
- -Example: Given the following default arguments, for 10 tries the sequence will be, -and assuming we go over the MaxElapsedTime on the 10th try: - - Request # RetryInterval (seconds) Randomized Interval (seconds) - - 1 0.5 [0.25, 0.75] - 2 0.75 [0.375, 1.125] - 3 1.125 [0.562, 1.687] - 4 1.687 [0.8435, 2.53] - 5 2.53 [1.265, 3.795] - 6 3.795 [1.897, 5.692] - 7 5.692 [2.846, 8.538] - 8 8.538 [4.269, 12.807] - 9 12.807 [6.403, 19.210] - 10 19.210 backoff.Stop - -Note: Implementation is not thread-safe. -*/ -type ExponentialBackOff struct { - InitialInterval time.Duration - RandomizationFactor float64 - Multiplier float64 - MaxInterval time.Duration - // After MaxElapsedTime the ExponentialBackOff returns Stop. - // It never stops if MaxElapsedTime == 0. - MaxElapsedTime time.Duration - Stop time.Duration - Clock Clock - - currentInterval time.Duration - startTime time.Time -} - -// Clock is an interface that returns current time for BackOff. -type Clock interface { - Now() time.Time -} - -// Default values for ExponentialBackOff. -const ( - DefaultInitialInterval = 500 * time.Millisecond - DefaultRandomizationFactor = 0.5 - DefaultMultiplier = 1.5 - DefaultMaxInterval = 60 * time.Second - DefaultMaxElapsedTime = 15 * time.Minute -) - -// NewExponentialBackOff creates an instance of ExponentialBackOff using default values. -func NewExponentialBackOff() *ExponentialBackOff { - b := &ExponentialBackOff{ - InitialInterval: DefaultInitialInterval, - RandomizationFactor: DefaultRandomizationFactor, - Multiplier: DefaultMultiplier, - MaxInterval: DefaultMaxInterval, - MaxElapsedTime: DefaultMaxElapsedTime, - Stop: Stop, - Clock: SystemClock, - } - b.Reset() - return b -} - -type systemClock struct{} - -func (t systemClock) Now() time.Time { - return time.Now() -} - -// SystemClock implements Clock interface that uses time.Now(). -var SystemClock = systemClock{} - -// Reset the interval back to the initial retry interval and restarts the timer. -// Reset must be called before using b. -func (b *ExponentialBackOff) Reset() { - b.currentInterval = b.InitialInterval - b.startTime = b.Clock.Now() -} - -// NextBackOff calculates the next backoff interval using the formula: -// Randomized interval = RetryInterval * (1 ± RandomizationFactor) -func (b *ExponentialBackOff) NextBackOff() time.Duration { - // Make sure we have not gone over the maximum elapsed time. - elapsed := b.GetElapsedTime() - next := getRandomValueFromInterval(b.RandomizationFactor, rand.Float64(), b.currentInterval) - b.incrementCurrentInterval() - if b.MaxElapsedTime != 0 && elapsed+next > b.MaxElapsedTime { - return b.Stop - } - return next -} - -// GetElapsedTime returns the elapsed time since an ExponentialBackOff instance -// is created and is reset when Reset() is called. -// -// The elapsed time is computed using time.Now().UnixNano(). It is -// safe to call even while the backoff policy is used by a running -// ticker. -func (b *ExponentialBackOff) GetElapsedTime() time.Duration { - return b.Clock.Now().Sub(b.startTime) -} - -// Increments the current interval by multiplying it with the multiplier. -func (b *ExponentialBackOff) incrementCurrentInterval() { - // Check for overflow, if overflow is detected set the current interval to the max interval. 
- if float64(b.currentInterval) >= float64(b.MaxInterval)/b.Multiplier { - b.currentInterval = b.MaxInterval - } else { - b.currentInterval = time.Duration(float64(b.currentInterval) * b.Multiplier) - } -} - -// Returns a random value from the following interval: -// [currentInterval - randomizationFactor * currentInterval, currentInterval + randomizationFactor * currentInterval]. -func getRandomValueFromInterval(randomizationFactor, random float64, currentInterval time.Duration) time.Duration { - var delta = randomizationFactor * float64(currentInterval) - var minInterval = float64(currentInterval) - delta - var maxInterval = float64(currentInterval) + delta - - // Get a random value from the range [minInterval, maxInterval]. - // The formula used below has a +1 because if the minInterval is 1 and the maxInterval is 3 then - // we want a 33% chance for selecting either 1, 2 or 3. - return time.Duration(minInterval + (random * (maxInterval - minInterval + 1))) -} diff --git a/vendor/github.com/cenkalti/backoff/v4/go.mod b/vendor/github.com/cenkalti/backoff/v4/go.mod deleted file mode 100644 index cef50ea6724..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/cenkalti/backoff/v4 - -go 1.12 diff --git a/vendor/github.com/cenkalti/backoff/v4/retry.go b/vendor/github.com/cenkalti/backoff/v4/retry.go deleted file mode 100644 index 6c776ccf8ed..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/retry.go +++ /dev/null @@ -1,96 +0,0 @@ -package backoff - -import "time" - -// An Operation is executing by Retry() or RetryNotify(). -// The operation will be retried using a backoff policy if it returns an error. -type Operation func() error - -// Notify is a notify-on-error function. It receives an operation error and -// backoff delay if the operation failed (with an error). -// -// NOTE that if the backoff policy stated to stop retrying, -// the notify function isn't called. -type Notify func(error, time.Duration) - -// Retry the operation o until it does not return error or BackOff stops. -// o is guaranteed to be run at least once. -// -// If o returns a *PermanentError, the operation is not retried, and the -// wrapped error is returned. -// -// Retry sleeps the goroutine for the duration returned by BackOff after a -// failed operation returns. -func Retry(o Operation, b BackOff) error { - return RetryNotify(o, b, nil) -} - -// RetryNotify calls notify function with the error and wait duration -// for each failed attempt before sleep. -func RetryNotify(operation Operation, b BackOff, notify Notify) error { - return RetryNotifyWithTimer(operation, b, notify, nil) -} - -// RetryNotifyWithTimer calls notify function with the error and wait duration using the given Timer -// for each failed attempt before sleep. -// A default timer that uses system timer is used when nil is passed. 
-func RetryNotifyWithTimer(operation Operation, b BackOff, notify Notify, t Timer) error { - var err error - var next time.Duration - if t == nil { - t = &defaultTimer{} - } - - defer func() { - t.Stop() - }() - - ctx := getContext(b) - - b.Reset() - for { - if err = operation(); err == nil { - return nil - } - - if permanent, ok := err.(*PermanentError); ok { - return permanent.Err - } - - if next = b.NextBackOff(); next == Stop { - return err - } - - if notify != nil { - notify(err, next) - } - - t.Start(next) - - select { - case <-ctx.Done(): - return ctx.Err() - case <-t.C(): - } - } -} - -// PermanentError signals that the operation should not be retried. -type PermanentError struct { - Err error -} - -func (e *PermanentError) Error() string { - return e.Err.Error() -} - -func (e *PermanentError) Unwrap() error { - return e.Err -} - -// Permanent wraps the given err in a *PermanentError. -func Permanent(err error) *PermanentError { - return &PermanentError{ - Err: err, - } -} diff --git a/vendor/github.com/cenkalti/backoff/v4/ticker.go b/vendor/github.com/cenkalti/backoff/v4/ticker.go deleted file mode 100644 index df9d68bce52..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/ticker.go +++ /dev/null @@ -1,97 +0,0 @@ -package backoff - -import ( - "context" - "sync" - "time" -) - -// Ticker holds a channel that delivers `ticks' of a clock at times reported by a BackOff. -// -// Ticks will continue to arrive when the previous operation is still running, -// so operations that take a while to fail could run in quick succession. -type Ticker struct { - C <-chan time.Time - c chan time.Time - b BackOff - ctx context.Context - timer Timer - stop chan struct{} - stopOnce sync.Once -} - -// NewTicker returns a new Ticker containing a channel that will send -// the time at times specified by the BackOff argument. Ticker is -// guaranteed to tick at least once. The channel is closed when Stop -// method is called or BackOff stops. It is not safe to manipulate the -// provided backoff policy (notably calling NextBackOff or Reset) -// while the ticker is running. -func NewTicker(b BackOff) *Ticker { - return NewTickerWithTimer(b, &defaultTimer{}) -} - -// NewTickerWithTimer returns a new Ticker with a custom timer. -// A default timer that uses system timer is used when nil is passed. -func NewTickerWithTimer(b BackOff, timer Timer) *Ticker { - if timer == nil { - timer = &defaultTimer{} - } - c := make(chan time.Time) - t := &Ticker{ - C: c, - c: c, - b: b, - ctx: getContext(b), - timer: timer, - stop: make(chan struct{}), - } - t.b.Reset() - go t.run() - return t -} - -// Stop turns off a ticker. After Stop, no more ticks will be sent. -func (t *Ticker) Stop() { - t.stopOnce.Do(func() { close(t.stop) }) -} - -func (t *Ticker) run() { - c := t.c - defer close(c) - - // Ticker is guaranteed to tick at least once. - afterC := t.send(time.Now()) - - for { - if afterC == nil { - return - } - - select { - case tick := <-afterC: - afterC = t.send(tick) - case <-t.stop: - t.c = nil // Prevent future ticks from being sent to the channel. 
- return - case <-t.ctx.Done(): - return - } - } -} - -func (t *Ticker) send(tick time.Time) <-chan time.Time { - select { - case t.c <- tick: - case <-t.stop: - return nil - } - - next := t.b.NextBackOff() - if next == Stop { - t.Stop() - return nil - } - - t.timer.Start(next) - return t.timer.C() -} diff --git a/vendor/github.com/cenkalti/backoff/v4/timer.go b/vendor/github.com/cenkalti/backoff/v4/timer.go deleted file mode 100644 index 8120d0213c5..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/timer.go +++ /dev/null @@ -1,35 +0,0 @@ -package backoff - -import "time" - -type Timer interface { - Start(duration time.Duration) - Stop() - C() <-chan time.Time -} - -// defaultTimer implements Timer interface using time.Timer -type defaultTimer struct { - timer *time.Timer -} - -// C returns the timers channel which receives the current time when the timer fires. -func (t *defaultTimer) C() <-chan time.Time { - return t.timer.C -} - -// Start starts the timer to fire after the given duration -func (t *defaultTimer) Start(duration time.Duration) { - if t.timer == nil { - t.timer = time.NewTimer(duration) - } else { - t.timer.Reset(duration) - } -} - -// Stop is called when the timer is not used anymore and resources may be freed. -func (t *defaultTimer) Stop() { - if t.timer != nil { - t.timer.Stop() - } -} diff --git a/vendor/github.com/cenkalti/backoff/v4/tries.go b/vendor/github.com/cenkalti/backoff/v4/tries.go deleted file mode 100644 index 28d58ca37c6..00000000000 --- a/vendor/github.com/cenkalti/backoff/v4/tries.go +++ /dev/null @@ -1,38 +0,0 @@ -package backoff - -import "time" - -/* -WithMaxRetries creates a wrapper around another BackOff, which will -return Stop if NextBackOff() has been called too many times since -the last time Reset() was called - -Note: Implementation is not thread-safe. -*/ -func WithMaxRetries(b BackOff, max uint64) BackOff { - return &backOffTries{delegate: b, maxTries: max} -} - -type backOffTries struct { - delegate BackOff - maxTries uint64 - numTries uint64 -} - -func (b *backOffTries) NextBackOff() time.Duration { - if b.maxTries == 0 { - return Stop - } - if b.maxTries > 0 { - if b.maxTries <= b.numTries { - return Stop - } - b.numTries++ - } - return b.delegate.NextBackOff() -} - -func (b *backOffTries) Reset() { - b.numTries = 0 - b.delegate.Reset() -} diff --git a/vendor/github.com/certifi/gocertifi/LICENSE b/vendor/github.com/certifi/gocertifi/LICENSE deleted file mode 100644 index cfd5dcbbb15..00000000000 --- a/vendor/github.com/certifi/gocertifi/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. diff --git a/vendor/github.com/certifi/gocertifi/README.md b/vendor/github.com/certifi/gocertifi/README.md deleted file mode 100644 index c8bc9f629ea..00000000000 --- a/vendor/github.com/certifi/gocertifi/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# GoCertifi: SSL Certificates for Golang - -This Go package contains a CA bundle that you can reference in your Go code. -This is useful for systems that do not have CA bundles that Golang can find -itself, or where a uniform set of CAs is valuable. - -This is the same CA bundle that ships with the -[Python Requests](https://github.com/kennethreitz/requests) library, and is a -Golang specific port of [certifi](https://github.com/kennethreitz/certifi). The -CA bundle is derived from Mozilla's canonical set. 
- -## Usage - -You can use the `gocertifi` package as follows: - -```go -import "github.com/certifi/gocertifi" - -cert_pool, err := gocertifi.CACerts() -``` - -You can use the returned `*x509.CertPool` as part of an HTTP transport, for example: - -```go -import ( - "net/http" - "crypto/tls" -) - -// Setup an HTTP client with a custom transport -transport := &http.Transport{ - TLSClientConfig: &tls.Config{RootCAs: cert_pool}, -} -client := &http.Client{Transport: transport} - -// Make an HTTP request using our custom transport -resp, err := client.Get("https://example.com") -``` - -## Detailed Documentation - -Import as follows: - -```go -import "github.com/certifi/gocertifi" -``` - -### Errors - -```go -var ErrParseFailed = errors.New("gocertifi: error when parsing certificates") -``` - -### Functions - -```go -func CACerts() (*x509.CertPool, error) -``` -CACerts builds an X.509 certificate pool containing the Mozilla CA Certificate -bundle. Returns nil on error along with an appropriate error code. diff --git a/vendor/github.com/certifi/gocertifi/certifi.go b/vendor/github.com/certifi/gocertifi/certifi.go deleted file mode 100644 index a152a0d4496..00000000000 --- a/vendor/github.com/certifi/gocertifi/certifi.go +++ /dev/null @@ -1,4680 +0,0 @@ -// Code generated by go generate; DO NOT EDIT. -// 2019-04-09 17:53:46.117677 -0700 PDT m=+1.272386477 -// https://mkcert.org/generate/ - -package gocertifi - -//go:generate go run gen.go - -import "crypto/x509" - -const pemcerts string = ` - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 
75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL 
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk 
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV 
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG 
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs 
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. 
OU=Tanúsítványkiadók (Certification Services) -# Label: "NetLock Arany (Class Gold) Főtanúsítvány" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G3" -# Serial: 10003001 -# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 -# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc -# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX -DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP -cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW -IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX -xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy -KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR -9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az -5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 -6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 -Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP -bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt -BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt -XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd -INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD -U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp -LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 -Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp -gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh -/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A -fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq -4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR -1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ -QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM -94B7IWcnMFk= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Label: "Certinomis - Root CA" -# Serial: 1 -# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f -# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 -# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 ------BEGIN CERTIFICATE----- -MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET -MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb -BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz -MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx -FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g -Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 -fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl -LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV -WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF -TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb -5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc -CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri -wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ -wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG -m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 -F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng -WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 -2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF -AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ -0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw -F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS -g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj -qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN -h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ -ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V -btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj -Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ -8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW -gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- 
-MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG 
-EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr -N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ 
-XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud -EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp -zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
-PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
-DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy -YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
-sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO -ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
-6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby -RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
-WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: 
"Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa -LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -` - -// CACerts builds an X.509 certificate pool containing the Mozilla CA -// Certificate bundle. Returns nil on error along with an appropriate error -// code. 
-func CACerts() (*x509.CertPool, error) { - pool := x509.NewCertPool() - pool.AppendCertsFromPEM([]byte(pemcerts)) - return pool, nil -} diff --git a/vendor/github.com/cespare/xxhash/LICENSE.txt b/vendor/github.com/cespare/xxhash/LICENSE.txt deleted file mode 100644 index 24b53065f40..00000000000 --- a/vendor/github.com/cespare/xxhash/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2016 Caleb Spare - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/cespare/xxhash/README.md b/vendor/github.com/cespare/xxhash/README.md deleted file mode 100644 index 0982fd25e5c..00000000000 --- a/vendor/github.com/cespare/xxhash/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# xxhash - -[![GoDoc](https://godoc.org/github.com/cespare/xxhash?status.svg)](https://godoc.org/github.com/cespare/xxhash) - -xxhash is a Go implementation of the 64-bit -[xxHash](http://cyan4973.github.io/xxHash/) algorithm, XXH64. This is a -high-quality hashing algorithm that is much faster than anything in the Go -standard library. - -The API is very small, taking its cue from the other hashing packages in the -standard library: - - $ go doc github.com/cespare/xxhash ! - package xxhash // import "github.com/cespare/xxhash" - - Package xxhash implements the 64-bit variant of xxHash (XXH64) as described - at http://cyan4973.github.io/xxHash/. - - func New() hash.Hash64 - func Sum64(b []byte) uint64 - func Sum64String(s string) uint64 - -This implementation provides a fast pure-Go implementation and an even faster -assembly implementation for amd64. 
- -## Benchmarks - -Here are some quick benchmarks comparing the pure-Go and assembly -implementations of Sum64 against another popular Go XXH64 implementation, -[github.com/OneOfOne/xxhash](https://github.com/OneOfOne/xxhash): - -| input size | OneOfOne | cespare (purego) | cespare | -| --- | --- | --- | --- | -| 5 B | 416 MB/s | 720 MB/s | 872 MB/s | -| 100 B | 3980 MB/s | 5013 MB/s | 5252 MB/s | -| 4 KB | 12727 MB/s | 12999 MB/s | 13026 MB/s | -| 10 MB | 9879 MB/s | 10775 MB/s | 10913 MB/s | - -These numbers were generated with: - -``` -$ go test -benchtime 10s -bench '/OneOfOne,' -$ go test -tags purego -benchtime 10s -bench '/xxhash,' -$ go test -benchtime 10s -bench '/xxhash,' -``` - -## Projects using this package - -- [InfluxDB](https://github.com/influxdata/influxdb) -- [Prometheus](https://github.com/prometheus/prometheus) diff --git a/vendor/github.com/cespare/xxhash/go.mod b/vendor/github.com/cespare/xxhash/go.mod deleted file mode 100644 index 10605a6a5e4..00000000000 --- a/vendor/github.com/cespare/xxhash/go.mod +++ /dev/null @@ -1,6 +0,0 @@ -module github.com/cespare/xxhash - -require ( - github.com/OneOfOne/xxhash v1.2.2 - github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 -) diff --git a/vendor/github.com/cespare/xxhash/go.sum b/vendor/github.com/cespare/xxhash/go.sum deleted file mode 100644 index f6b5542617a..00000000000 --- a/vendor/github.com/cespare/xxhash/go.sum +++ /dev/null @@ -1,4 +0,0 @@ -github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= diff --git a/vendor/github.com/cespare/xxhash/rotate.go b/vendor/github.com/cespare/xxhash/rotate.go deleted file mode 100644 index f3eac5ebc02..00000000000 --- a/vendor/github.com/cespare/xxhash/rotate.go +++ /dev/null @@ -1,14 +0,0 @@ -// +build !go1.9 - -package xxhash - -// TODO(caleb): After Go 1.10 comes out, remove this fallback code. 
- -func rol1(x uint64) uint64 { return (x << 1) | (x >> (64 - 1)) } -func rol7(x uint64) uint64 { return (x << 7) | (x >> (64 - 7)) } -func rol11(x uint64) uint64 { return (x << 11) | (x >> (64 - 11)) } -func rol12(x uint64) uint64 { return (x << 12) | (x >> (64 - 12)) } -func rol18(x uint64) uint64 { return (x << 18) | (x >> (64 - 18)) } -func rol23(x uint64) uint64 { return (x << 23) | (x >> (64 - 23)) } -func rol27(x uint64) uint64 { return (x << 27) | (x >> (64 - 27)) } -func rol31(x uint64) uint64 { return (x << 31) | (x >> (64 - 31)) } diff --git a/vendor/github.com/cespare/xxhash/rotate19.go b/vendor/github.com/cespare/xxhash/rotate19.go deleted file mode 100644 index b99612bab88..00000000000 --- a/vendor/github.com/cespare/xxhash/rotate19.go +++ /dev/null @@ -1,14 +0,0 @@ -// +build go1.9 - -package xxhash - -import "math/bits" - -func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) } -func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) } -func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) } -func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) } -func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) } -func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) } -func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) } -func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) } diff --git a/vendor/github.com/cespare/xxhash/v2/.travis.yml b/vendor/github.com/cespare/xxhash/v2/.travis.yml deleted file mode 100644 index c516ea88da7..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: go -go: - - "1.x" - - master -env: - - TAGS="" - - TAGS="-tags purego" -script: go test $TAGS -v ./... diff --git a/vendor/github.com/cespare/xxhash/v2/LICENSE.txt b/vendor/github.com/cespare/xxhash/v2/LICENSE.txt deleted file mode 100644 index 24b53065f40..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2016 Caleb Spare - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
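The xxhash README removed above documents a deliberately small public API (`func New() hash.Hash64`, `func Sum64(b []byte) uint64`, `func Sum64String(s string) uint64`). For reference only, here is a minimal, non-authoritative sketch of how that API is typically consumed; the `main` package, the sample inputs, and the assumption that the module now resolves from upstream rather than from the deleted `vendor/` copy are illustrative and not part of this change:

```go
package main

import (
	"fmt"

	// Import path as documented in the removed README; with the vendored
	// copy deleted, this is assumed to resolve via the Go module proxy.
	"github.com/cespare/xxhash"
)

func main() {
	// One-shot hashing of a byte slice or a string.
	fmt.Printf("%016x\n", xxhash.Sum64([]byte("hello, world")))
	fmt.Printf("%016x\n", xxhash.Sum64String("hello, world"))

	// Streaming use via the hash.Hash64 returned by New.
	h := xxhash.New()
	h.Write([]byte("hello, "))
	h.Write([]byte("world"))
	fmt.Printf("%016x\n", h.Sum64())
}
```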
diff --git a/vendor/github.com/cespare/xxhash/v2/README.md b/vendor/github.com/cespare/xxhash/v2/README.md deleted file mode 100644 index 2fd8693c21b..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# xxhash - -[![GoDoc](https://godoc.org/github.com/cespare/xxhash?status.svg)](https://godoc.org/github.com/cespare/xxhash) -[![Build Status](https://travis-ci.org/cespare/xxhash.svg?branch=master)](https://travis-ci.org/cespare/xxhash) - -xxhash is a Go implementation of the 64-bit -[xxHash](http://cyan4973.github.io/xxHash/) algorithm, XXH64. This is a -high-quality hashing algorithm that is much faster than anything in the Go -standard library. - -This package provides a straightforward API: - -``` -func Sum64(b []byte) uint64 -func Sum64String(s string) uint64 -type Digest struct{ ... } - func New() *Digest -``` - -The `Digest` type implements hash.Hash64. Its key methods are: - -``` -func (*Digest) Write([]byte) (int, error) -func (*Digest) WriteString(string) (int, error) -func (*Digest) Sum64() uint64 -``` - -This implementation provides a fast pure-Go implementation and an even faster -assembly implementation for amd64. - -## Compatibility - -This package is in a module and the latest code is in version 2 of the module. -You need a version of Go with at least "minimal module compatibility" to use -github.com/cespare/xxhash/v2: - -* 1.9.7+ for Go 1.9 -* 1.10.3+ for Go 1.10 -* Go 1.11 or later - -I recommend using the latest release of Go. - -## Benchmarks - -Here are some quick benchmarks comparing the pure-Go and assembly -implementations of Sum64. - -| input size | purego | asm | -| --- | --- | --- | -| 5 B | 979.66 MB/s | 1291.17 MB/s | -| 100 B | 7475.26 MB/s | 7973.40 MB/s | -| 4 KB | 17573.46 MB/s | 17602.65 MB/s | -| 10 MB | 17131.46 MB/s | 17142.16 MB/s | - -These numbers were generated on Ubuntu 18.04 with an Intel i7-8700K CPU using -the following commands under Go 1.11.2: - -``` -$ go test -tags purego -benchtime 10s -bench '/xxhash,direct,bytes' -$ go test -benchtime 10s -bench '/xxhash,direct,bytes' -``` - -## Projects using this package - -- [InfluxDB](https://github.com/influxdata/influxdb) -- [Prometheus](https://github.com/prometheus/prometheus) -- [FreeCache](https://github.com/coocood/freecache) diff --git a/vendor/github.com/cespare/xxhash/v2/go.mod b/vendor/github.com/cespare/xxhash/v2/go.mod deleted file mode 100644 index 49f67608bf6..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/cespare/xxhash/v2 - -go 1.11 diff --git a/vendor/github.com/cespare/xxhash/v2/go.sum b/vendor/github.com/cespare/xxhash/v2/go.sum deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash.go b/vendor/github.com/cespare/xxhash/v2/xxhash.go deleted file mode 100644 index db0b35fbe39..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash.go +++ /dev/null @@ -1,236 +0,0 @@ -// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described -// at http://cyan4973.github.io/xxHash/. -package xxhash - -import ( - "encoding/binary" - "errors" - "math/bits" -) - -const ( - prime1 uint64 = 11400714785074694791 - prime2 uint64 = 14029467366897019727 - prime3 uint64 = 1609587929392839161 - prime4 uint64 = 9650029242287828579 - prime5 uint64 = 2870177450012600261 -) - -// NOTE(caleb): I'm using both consts and vars of the primes. 
Using consts where -// possible in the Go code is worth a small (but measurable) performance boost -// by avoiding some MOVQs. Vars are needed for the asm and also are useful for -// convenience in the Go code in a few places where we need to intentionally -// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the -// result overflows a uint64). -var ( - prime1v = prime1 - prime2v = prime2 - prime3v = prime3 - prime4v = prime4 - prime5v = prime5 -) - -// Digest implements hash.Hash64. -type Digest struct { - v1 uint64 - v2 uint64 - v3 uint64 - v4 uint64 - total uint64 - mem [32]byte - n int // how much of mem is used -} - -// New creates a new Digest that computes the 64-bit xxHash algorithm. -func New() *Digest { - var d Digest - d.Reset() - return &d -} - -// Reset clears the Digest's state so that it can be reused. -func (d *Digest) Reset() { - d.v1 = prime1v + prime2 - d.v2 = prime2 - d.v3 = 0 - d.v4 = -prime1v - d.total = 0 - d.n = 0 -} - -// Size always returns 8 bytes. -func (d *Digest) Size() int { return 8 } - -// BlockSize always returns 32 bytes. -func (d *Digest) BlockSize() int { return 32 } - -// Write adds more data to d. It always returns len(b), nil. -func (d *Digest) Write(b []byte) (n int, err error) { - n = len(b) - d.total += uint64(n) - - if d.n+n < 32 { - // This new data doesn't even fill the current block. - copy(d.mem[d.n:], b) - d.n += n - return - } - - if d.n > 0 { - // Finish off the partial block. - copy(d.mem[d.n:], b) - d.v1 = round(d.v1, u64(d.mem[0:8])) - d.v2 = round(d.v2, u64(d.mem[8:16])) - d.v3 = round(d.v3, u64(d.mem[16:24])) - d.v4 = round(d.v4, u64(d.mem[24:32])) - b = b[32-d.n:] - d.n = 0 - } - - if len(b) >= 32 { - // One or more full blocks left. - nw := writeBlocks(d, b) - b = b[nw:] - } - - // Store any remaining partial block. - copy(d.mem[:], b) - d.n = len(b) - - return -} - -// Sum appends the current hash to b and returns the resulting slice. -func (d *Digest) Sum(b []byte) []byte { - s := d.Sum64() - return append( - b, - byte(s>>56), - byte(s>>48), - byte(s>>40), - byte(s>>32), - byte(s>>24), - byte(s>>16), - byte(s>>8), - byte(s), - ) -} - -// Sum64 returns the current hash. -func (d *Digest) Sum64() uint64 { - var h uint64 - - if d.total >= 32 { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = d.v3 + prime5 - } - - h += d.total - - i, end := 0, d.n - for ; i+8 <= end; i += 8 { - k1 := round(0, u64(d.mem[i:i+8])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if i+4 <= end { - h ^= uint64(u32(d.mem[i:i+4])) * prime1 - h = rol23(h)*prime2 + prime3 - i += 4 - } - for i < end { - h ^= uint64(d.mem[i]) * prime5 - h = rol11(h) * prime1 - i++ - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -const ( - magic = "xxh\x06" - marshaledSize = len(magic) + 8*5 + 32 -) - -// MarshalBinary implements the encoding.BinaryMarshaler interface. -func (d *Digest) MarshalBinary() ([]byte, error) { - b := make([]byte, 0, marshaledSize) - b = append(b, magic...) - b = appendUint64(b, d.v1) - b = appendUint64(b, d.v2) - b = appendUint64(b, d.v3) - b = appendUint64(b, d.v4) - b = appendUint64(b, d.total) - b = append(b, d.mem[:d.n]...) - b = b[:len(b)+len(d.mem)-d.n] - return b, nil -} - -// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. 
-func (d *Digest) UnmarshalBinary(b []byte) error { - if len(b) < len(magic) || string(b[:len(magic)]) != magic { - return errors.New("xxhash: invalid hash state identifier") - } - if len(b) != marshaledSize { - return errors.New("xxhash: invalid hash state size") - } - b = b[len(magic):] - b, d.v1 = consumeUint64(b) - b, d.v2 = consumeUint64(b) - b, d.v3 = consumeUint64(b) - b, d.v4 = consumeUint64(b) - b, d.total = consumeUint64(b) - copy(d.mem[:], b) - b = b[len(d.mem):] - d.n = int(d.total % uint64(len(d.mem))) - return nil -} - -func appendUint64(b []byte, x uint64) []byte { - var a [8]byte - binary.LittleEndian.PutUint64(a[:], x) - return append(b, a[:]...) -} - -func consumeUint64(b []byte) ([]byte, uint64) { - x := u64(b) - return b[8:], x -} - -func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) } -func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) } - -func round(acc, input uint64) uint64 { - acc += input * prime2 - acc = rol31(acc) - acc *= prime1 - return acc -} - -func mergeRound(acc, val uint64) uint64 { - val = round(0, val) - acc ^= val - acc = acc*prime1 + prime4 - return acc -} - -func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) } -func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) } -func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) } -func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) } -func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) } -func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) } -func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) } -func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) } diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.go b/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.go deleted file mode 100644 index ad14b807f4d..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.go +++ /dev/null @@ -1,13 +0,0 @@ -// +build !appengine -// +build gc -// +build !purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. -// -//go:noescape -func Sum64(b []byte) uint64 - -//go:noescape -func writeBlocks(d *Digest, b []byte) int diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s b/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s deleted file mode 100644 index d580e32aed4..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s +++ /dev/null @@ -1,215 +0,0 @@ -// +build !appengine -// +build gc -// +build !purego - -#include "textflag.h" - -// Register allocation: -// AX h -// CX pointer to advance through b -// DX n -// BX loop end -// R8 v1, k1 -// R9 v2 -// R10 v3 -// R11 v4 -// R12 tmp -// R13 prime1v -// R14 prime2v -// R15 prime4v - -// round reads from and advances the buffer pointer in CX. -// It assumes that R13 has prime1v and R14 has prime2v. -#define round(r) \ - MOVQ (CX), R12 \ - ADDQ $8, CX \ - IMULQ R14, R12 \ - ADDQ R12, r \ - ROLQ $31, r \ - IMULQ R13, r - -// mergeRound applies a merge round on the two registers acc and val. -// It assumes that R13 has prime1v, R14 has prime2v, and R15 has prime4v. -#define mergeRound(acc, val) \ - IMULQ R14, val \ - ROLQ $31, val \ - IMULQ R13, val \ - XORQ val, acc \ - IMULQ R13, acc \ - ADDQ R15, acc - -// func Sum64(b []byte) uint64 -TEXT ·Sum64(SB), NOSPLIT, $0-32 - // Load fixed primes. - MOVQ ·prime1v(SB), R13 - MOVQ ·prime2v(SB), R14 - MOVQ ·prime4v(SB), R15 - - // Load slice. - MOVQ b_base+0(FP), CX - MOVQ b_len+8(FP), DX - LEAQ (CX)(DX*1), BX - - // The first loop limit will be len(b)-32. 
- SUBQ $32, BX - - // Check whether we have at least one block. - CMPQ DX, $32 - JLT noBlocks - - // Set up initial state (v1, v2, v3, v4). - MOVQ R13, R8 - ADDQ R14, R8 - MOVQ R14, R9 - XORQ R10, R10 - XORQ R11, R11 - SUBQ R13, R11 - - // Loop until CX > BX. -blockLoop: - round(R8) - round(R9) - round(R10) - round(R11) - - CMPQ CX, BX - JLE blockLoop - - MOVQ R8, AX - ROLQ $1, AX - MOVQ R9, R12 - ROLQ $7, R12 - ADDQ R12, AX - MOVQ R10, R12 - ROLQ $12, R12 - ADDQ R12, AX - MOVQ R11, R12 - ROLQ $18, R12 - ADDQ R12, AX - - mergeRound(AX, R8) - mergeRound(AX, R9) - mergeRound(AX, R10) - mergeRound(AX, R11) - - JMP afterBlocks - -noBlocks: - MOVQ ·prime5v(SB), AX - -afterBlocks: - ADDQ DX, AX - - // Right now BX has len(b)-32, and we want to loop until CX > len(b)-8. - ADDQ $24, BX - - CMPQ CX, BX - JG fourByte - -wordLoop: - // Calculate k1. - MOVQ (CX), R8 - ADDQ $8, CX - IMULQ R14, R8 - ROLQ $31, R8 - IMULQ R13, R8 - - XORQ R8, AX - ROLQ $27, AX - IMULQ R13, AX - ADDQ R15, AX - - CMPQ CX, BX - JLE wordLoop - -fourByte: - ADDQ $4, BX - CMPQ CX, BX - JG singles - - MOVL (CX), R8 - ADDQ $4, CX - IMULQ R13, R8 - XORQ R8, AX - - ROLQ $23, AX - IMULQ R14, AX - ADDQ ·prime3v(SB), AX - -singles: - ADDQ $4, BX - CMPQ CX, BX - JGE finalize - -singlesLoop: - MOVBQZX (CX), R12 - ADDQ $1, CX - IMULQ ·prime5v(SB), R12 - XORQ R12, AX - - ROLQ $11, AX - IMULQ R13, AX - - CMPQ CX, BX - JL singlesLoop - -finalize: - MOVQ AX, R12 - SHRQ $33, R12 - XORQ R12, AX - IMULQ R14, AX - MOVQ AX, R12 - SHRQ $29, R12 - XORQ R12, AX - IMULQ ·prime3v(SB), AX - MOVQ AX, R12 - SHRQ $32, R12 - XORQ R12, AX - - MOVQ AX, ret+24(FP) - RET - -// writeBlocks uses the same registers as above except that it uses AX to store -// the d pointer. - -// func writeBlocks(d *Digest, b []byte) int -TEXT ·writeBlocks(SB), NOSPLIT, $0-40 - // Load fixed primes needed for round. - MOVQ ·prime1v(SB), R13 - MOVQ ·prime2v(SB), R14 - - // Load slice. - MOVQ b_base+8(FP), CX - MOVQ b_len+16(FP), DX - LEAQ (CX)(DX*1), BX - SUBQ $32, BX - - // Load vN from d. - MOVQ d+0(FP), AX - MOVQ 0(AX), R8 // v1 - MOVQ 8(AX), R9 // v2 - MOVQ 16(AX), R10 // v3 - MOVQ 24(AX), R11 // v4 - - // We don't need to check the loop condition here; this function is - // always called with at least one block of data to process. -blockLoop: - round(R8) - round(R9) - round(R10) - round(R11) - - CMPQ CX, BX - JLE blockLoop - - // Copy vN back to d. - MOVQ R8, 0(AX) - MOVQ R9, 8(AX) - MOVQ R10, 16(AX) - MOVQ R11, 24(AX) - - // The number of bytes written is CX minus the old base pointer. - SUBQ b_base+8(FP), CX - MOVQ CX, ret+32(FP) - - RET diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go b/vendor/github.com/cespare/xxhash/v2/xxhash_other.go deleted file mode 100644 index 4a5a821603e..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go +++ /dev/null @@ -1,76 +0,0 @@ -// +build !amd64 appengine !gc purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. -func Sum64(b []byte) uint64 { - // A simpler version would be - // d := New() - // d.Write(b) - // return d.Sum64() - // but this is faster, particularly for small inputs. 
- - n := len(b) - var h uint64 - - if n >= 32 { - v1 := prime1v + prime2 - v2 := prime2 - v3 := uint64(0) - v4 := -prime1v - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = prime5 - } - - h += uint64(n) - - i, end := 0, len(b) - for ; i+8 <= end; i += 8 { - k1 := round(0, u64(b[i:i+8:len(b)])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if i+4 <= end { - h ^= uint64(u32(b[i:i+4:len(b)])) * prime1 - h = rol23(h)*prime2 + prime3 - i += 4 - } - for ; i < end; i++ { - h ^= uint64(b[i]) * prime5 - h = rol11(h) * prime1 - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -func writeBlocks(d *Digest, b []byte) int { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - n := len(b) - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4 - return n - len(b) -} diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go deleted file mode 100644 index fc9bea7a31f..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build appengine - -// This file contains the safe implementations of otherwise unsafe-using code. - -package xxhash - -// Sum64String computes the 64-bit xxHash digest of s. -func Sum64String(s string) uint64 { - return Sum64([]byte(s)) -} - -// WriteString adds more data to d. It always returns len(s), nil. -func (d *Digest) WriteString(s string) (n int, err error) { - return d.Write([]byte(s)) -} diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go deleted file mode 100644 index 53bf76efbc2..00000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go +++ /dev/null @@ -1,46 +0,0 @@ -// +build !appengine - -// This file encapsulates usage of unsafe. -// xxhash_safe.go contains the safe implementations. - -package xxhash - -import ( - "reflect" - "unsafe" -) - -// Notes: -// -// See https://groups.google.com/d/msg/golang-nuts/dcjzJy-bSpw/tcZYBzQqAQAJ -// for some discussion about these unsafe conversions. -// -// In the future it's possible that compiler optimizations will make these -// unsafe operations unnecessary: https://golang.org/issue/2205. -// -// Both of these wrapper functions still incur function call overhead since they -// will not be inlined. We could write Go/asm copies of Sum64 and Digest.Write -// for strings to squeeze out a bit more speed. Mid-stack inlining should -// eventually fix this. - -// Sum64String computes the 64-bit xxHash digest of s. -// It may be faster than Sum64([]byte(s)) by avoiding a copy. -func Sum64String(s string) uint64 { - var b []byte - bh := (*reflect.SliceHeader)(unsafe.Pointer(&b)) - bh.Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data - bh.Len = len(s) - bh.Cap = len(s) - return Sum64(b) -} - -// WriteString adds more data to d. It always returns len(s), nil. -// It may be faster than Write([]byte(s)) by avoiding a copy. 
-func (d *Digest) WriteString(s string) (n int, err error) { - var b []byte - bh := (*reflect.SliceHeader)(unsafe.Pointer(&b)) - bh.Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data - bh.Len = len(s) - bh.Cap = len(s) - return d.Write(b) -} diff --git a/vendor/github.com/cespare/xxhash/xxhash.go b/vendor/github.com/cespare/xxhash/xxhash.go deleted file mode 100644 index f896bd28f05..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash.go +++ /dev/null @@ -1,168 +0,0 @@ -// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described -// at http://cyan4973.github.io/xxHash/. -package xxhash - -import ( - "encoding/binary" - "hash" -) - -const ( - prime1 uint64 = 11400714785074694791 - prime2 uint64 = 14029467366897019727 - prime3 uint64 = 1609587929392839161 - prime4 uint64 = 9650029242287828579 - prime5 uint64 = 2870177450012600261 -) - -// NOTE(caleb): I'm using both consts and vars of the primes. Using consts where -// possible in the Go code is worth a small (but measurable) performance boost -// by avoiding some MOVQs. Vars are needed for the asm and also are useful for -// convenience in the Go code in a few places where we need to intentionally -// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the -// result overflows a uint64). -var ( - prime1v = prime1 - prime2v = prime2 - prime3v = prime3 - prime4v = prime4 - prime5v = prime5 -) - -type xxh struct { - v1 uint64 - v2 uint64 - v3 uint64 - v4 uint64 - total int - mem [32]byte - n int // how much of mem is used -} - -// New creates a new hash.Hash64 that implements the 64-bit xxHash algorithm. -func New() hash.Hash64 { - var x xxh - x.Reset() - return &x -} - -func (x *xxh) Reset() { - x.n = 0 - x.total = 0 - x.v1 = prime1v + prime2 - x.v2 = prime2 - x.v3 = 0 - x.v4 = -prime1v -} - -func (x *xxh) Size() int { return 8 } -func (x *xxh) BlockSize() int { return 32 } - -// Write adds more data to x. It always returns len(b), nil. -func (x *xxh) Write(b []byte) (n int, err error) { - n = len(b) - x.total += len(b) - - if x.n+len(b) < 32 { - // This new data doesn't even fill the current block. - copy(x.mem[x.n:], b) - x.n += len(b) - return - } - - if x.n > 0 { - // Finish off the partial block. - copy(x.mem[x.n:], b) - x.v1 = round(x.v1, u64(x.mem[0:8])) - x.v2 = round(x.v2, u64(x.mem[8:16])) - x.v3 = round(x.v3, u64(x.mem[16:24])) - x.v4 = round(x.v4, u64(x.mem[24:32])) - b = b[32-x.n:] - x.n = 0 - } - - if len(b) >= 32 { - // One or more full blocks left. - b = writeBlocks(x, b) - } - - // Store any remaining partial block. 
- copy(x.mem[:], b) - x.n = len(b) - - return -} - -func (x *xxh) Sum(b []byte) []byte { - s := x.Sum64() - return append( - b, - byte(s>>56), - byte(s>>48), - byte(s>>40), - byte(s>>32), - byte(s>>24), - byte(s>>16), - byte(s>>8), - byte(s), - ) -} - -func (x *xxh) Sum64() uint64 { - var h uint64 - - if x.total >= 32 { - v1, v2, v3, v4 := x.v1, x.v2, x.v3, x.v4 - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = x.v3 + prime5 - } - - h += uint64(x.total) - - i, end := 0, x.n - for ; i+8 <= end; i += 8 { - k1 := round(0, u64(x.mem[i:i+8])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if i+4 <= end { - h ^= uint64(u32(x.mem[i:i+4])) * prime1 - h = rol23(h)*prime2 + prime3 - i += 4 - } - for i < end { - h ^= uint64(x.mem[i]) * prime5 - h = rol11(h) * prime1 - i++ - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) } -func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) } - -func round(acc, input uint64) uint64 { - acc += input * prime2 - acc = rol31(acc) - acc *= prime1 - return acc -} - -func mergeRound(acc, val uint64) uint64 { - val = round(0, val) - acc ^= val - acc = acc*prime1 + prime4 - return acc -} diff --git a/vendor/github.com/cespare/xxhash/xxhash_amd64.go b/vendor/github.com/cespare/xxhash/xxhash_amd64.go deleted file mode 100644 index d6176526802..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash_amd64.go +++ /dev/null @@ -1,12 +0,0 @@ -// +build !appengine -// +build gc -// +build !purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. -// -//go:noescape -func Sum64(b []byte) uint64 - -func writeBlocks(x *xxh, b []byte) []byte diff --git a/vendor/github.com/cespare/xxhash/xxhash_amd64.s b/vendor/github.com/cespare/xxhash/xxhash_amd64.s deleted file mode 100644 index 757f2011f0f..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash_amd64.s +++ /dev/null @@ -1,233 +0,0 @@ -// +build !appengine -// +build gc -// +build !purego - -#include "textflag.h" - -// Register allocation: -// AX h -// CX pointer to advance through b -// DX n -// BX loop end -// R8 v1, k1 -// R9 v2 -// R10 v3 -// R11 v4 -// R12 tmp -// R13 prime1v -// R14 prime2v -// R15 prime4v - -// round reads from and advances the buffer pointer in CX. -// It assumes that R13 has prime1v and R14 has prime2v. -#define round(r) \ - MOVQ (CX), R12 \ - ADDQ $8, CX \ - IMULQ R14, R12 \ - ADDQ R12, r \ - ROLQ $31, r \ - IMULQ R13, r - -// mergeRound applies a merge round on the two registers acc and val. -// It assumes that R13 has prime1v, R14 has prime2v, and R15 has prime4v. -#define mergeRound(acc, val) \ - IMULQ R14, val \ - ROLQ $31, val \ - IMULQ R13, val \ - XORQ val, acc \ - IMULQ R13, acc \ - ADDQ R15, acc - -// func Sum64(b []byte) uint64 -TEXT ·Sum64(SB), NOSPLIT, $0-32 - // Load fixed primes. - MOVQ ·prime1v(SB), R13 - MOVQ ·prime2v(SB), R14 - MOVQ ·prime4v(SB), R15 - - // Load slice. - MOVQ b_base+0(FP), CX - MOVQ b_len+8(FP), DX - LEAQ (CX)(DX*1), BX - - // The first loop limit will be len(b)-32. - SUBQ $32, BX - - // Check whether we have at least one block. - CMPQ DX, $32 - JLT noBlocks - - // Set up initial state (v1, v2, v3, v4). - MOVQ R13, R8 - ADDQ R14, R8 - MOVQ R14, R9 - XORQ R10, R10 - XORQ R11, R11 - SUBQ R13, R11 - - // Loop until CX > BX. 
-blockLoop: - round(R8) - round(R9) - round(R10) - round(R11) - - CMPQ CX, BX - JLE blockLoop - - MOVQ R8, AX - ROLQ $1, AX - MOVQ R9, R12 - ROLQ $7, R12 - ADDQ R12, AX - MOVQ R10, R12 - ROLQ $12, R12 - ADDQ R12, AX - MOVQ R11, R12 - ROLQ $18, R12 - ADDQ R12, AX - - mergeRound(AX, R8) - mergeRound(AX, R9) - mergeRound(AX, R10) - mergeRound(AX, R11) - - JMP afterBlocks - -noBlocks: - MOVQ ·prime5v(SB), AX - -afterBlocks: - ADDQ DX, AX - - // Right now BX has len(b)-32, and we want to loop until CX > len(b)-8. - ADDQ $24, BX - - CMPQ CX, BX - JG fourByte - -wordLoop: - // Calculate k1. - MOVQ (CX), R8 - ADDQ $8, CX - IMULQ R14, R8 - ROLQ $31, R8 - IMULQ R13, R8 - - XORQ R8, AX - ROLQ $27, AX - IMULQ R13, AX - ADDQ R15, AX - - CMPQ CX, BX - JLE wordLoop - -fourByte: - ADDQ $4, BX - CMPQ CX, BX - JG singles - - MOVL (CX), R8 - ADDQ $4, CX - IMULQ R13, R8 - XORQ R8, AX - - ROLQ $23, AX - IMULQ R14, AX - ADDQ ·prime3v(SB), AX - -singles: - ADDQ $4, BX - CMPQ CX, BX - JGE finalize - -singlesLoop: - MOVBQZX (CX), R12 - ADDQ $1, CX - IMULQ ·prime5v(SB), R12 - XORQ R12, AX - - ROLQ $11, AX - IMULQ R13, AX - - CMPQ CX, BX - JL singlesLoop - -finalize: - MOVQ AX, R12 - SHRQ $33, R12 - XORQ R12, AX - IMULQ R14, AX - MOVQ AX, R12 - SHRQ $29, R12 - XORQ R12, AX - IMULQ ·prime3v(SB), AX - MOVQ AX, R12 - SHRQ $32, R12 - XORQ R12, AX - - MOVQ AX, ret+24(FP) - RET - -// writeBlocks uses the same registers as above except that it uses AX to store -// the x pointer. - -// func writeBlocks(x *xxh, b []byte) []byte -TEXT ·writeBlocks(SB), NOSPLIT, $0-56 - // Load fixed primes needed for round. - MOVQ ·prime1v(SB), R13 - MOVQ ·prime2v(SB), R14 - - // Load slice. - MOVQ b_base+8(FP), CX - MOVQ CX, ret_base+32(FP) // initialize return base pointer; see NOTE below - MOVQ b_len+16(FP), DX - LEAQ (CX)(DX*1), BX - SUBQ $32, BX - - // Load vN from x. - MOVQ x+0(FP), AX - MOVQ 0(AX), R8 // v1 - MOVQ 8(AX), R9 // v2 - MOVQ 16(AX), R10 // v3 - MOVQ 24(AX), R11 // v4 - - // We don't need to check the loop condition here; this function is - // always called with at least one block of data to process. -blockLoop: - round(R8) - round(R9) - round(R10) - round(R11) - - CMPQ CX, BX - JLE blockLoop - - // Copy vN back to x. - MOVQ R8, 0(AX) - MOVQ R9, 8(AX) - MOVQ R10, 16(AX) - MOVQ R11, 24(AX) - - // Construct return slice. - // NOTE: It's important that we don't construct a slice that has a base - // pointer off the end of the original slice, as in Go 1.7+ this will - // cause runtime crashes. (See discussion in, for example, - // https://github.com/golang/go/issues/16772.) - // Therefore, we calculate the length/cap first, and if they're zero, we - // keep the old base. This is what the compiler does as well if you - // write code like - // b = b[len(b):] - - // New length is 32 - (CX - BX) -> BX+32 - CX. - ADDQ $32, BX - SUBQ CX, BX - JZ afterSetBase - - MOVQ CX, ret_base+32(FP) - -afterSetBase: - MOVQ BX, ret_len+40(FP) - MOVQ BX, ret_cap+48(FP) // set cap == len - - RET diff --git a/vendor/github.com/cespare/xxhash/xxhash_other.go b/vendor/github.com/cespare/xxhash/xxhash_other.go deleted file mode 100644 index c68d13f89e9..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash_other.go +++ /dev/null @@ -1,75 +0,0 @@ -// +build !amd64 appengine !gc purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. -func Sum64(b []byte) uint64 { - // A simpler version would be - // x := New() - // x.Write(b) - // return x.Sum64() - // but this is faster, particularly for small inputs. 
- - n := len(b) - var h uint64 - - if n >= 32 { - v1 := prime1v + prime2 - v2 := prime2 - v3 := uint64(0) - v4 := -prime1v - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = prime5 - } - - h += uint64(n) - - i, end := 0, len(b) - for ; i+8 <= end; i += 8 { - k1 := round(0, u64(b[i:i+8:len(b)])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if i+4 <= end { - h ^= uint64(u32(b[i:i+4:len(b)])) * prime1 - h = rol23(h)*prime2 + prime3 - i += 4 - } - for ; i < end; i++ { - h ^= uint64(b[i]) * prime5 - h = rol11(h) * prime1 - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -func writeBlocks(x *xxh, b []byte) []byte { - v1, v2, v3, v4 := x.v1, x.v2, x.v3, x.v4 - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - x.v1, x.v2, x.v3, x.v4 = v1, v2, v3, v4 - return b -} diff --git a/vendor/github.com/cespare/xxhash/xxhash_safe.go b/vendor/github.com/cespare/xxhash/xxhash_safe.go deleted file mode 100644 index dfa15ab7e27..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash_safe.go +++ /dev/null @@ -1,10 +0,0 @@ -// +build appengine - -// This file contains the safe implementations of otherwise unsafe-using code. - -package xxhash - -// Sum64String computes the 64-bit xxHash digest of s. -func Sum64String(s string) uint64 { - return Sum64([]byte(s)) -} diff --git a/vendor/github.com/cespare/xxhash/xxhash_unsafe.go b/vendor/github.com/cespare/xxhash/xxhash_unsafe.go deleted file mode 100644 index d2b64e8bb00..00000000000 --- a/vendor/github.com/cespare/xxhash/xxhash_unsafe.go +++ /dev/null @@ -1,30 +0,0 @@ -// +build !appengine - -// This file encapsulates usage of unsafe. -// xxhash_safe.go contains the safe implementations. - -package xxhash - -import ( - "reflect" - "unsafe" -) - -// Sum64String computes the 64-bit xxHash digest of s. -// It may be faster than Sum64([]byte(s)) by avoiding a copy. -// -// TODO(caleb): Consider removing this if an optimization is ever added to make -// it unnecessary: https://golang.org/issue/2205. -// -// TODO(caleb): We still have a function call; we could instead write Go/asm -// copies of Sum64 for strings to squeeze out a bit more speed. -func Sum64String(s string) uint64 { - // See https://groups.google.com/d/msg/golang-nuts/dcjzJy-bSpw/tcZYBzQqAQAJ - // for some discussion about this unsafe conversion. - var b []byte - bh := (*reflect.SliceHeader)(unsafe.Pointer(&b)) - bh.Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data - bh.Len = len(s) - bh.Cap = len(s) - return Sum64(b) -} diff --git a/vendor/github.com/clbanning/mxj/LICENSE b/vendor/github.com/clbanning/mxj/LICENSE deleted file mode 100644 index f27bccdf06e..00000000000 --- a/vendor/github.com/clbanning/mxj/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -Copyright (c) 2012-2016 Charles Banning . All rights reserved. 
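
The hunks above delete the vendored copies of github.com/cespare/xxhash (v1 and v2): the scalar Sum64 implementation, the amd64 assembly fast path, and the safe/unsafe string helpers. For reference, a minimal, illustrative sketch of how that package is normally consumed follows; the entry points (Sum64, Sum64String, New) come from the deleted sources, while the sample payload and the assumption that the dependency is now resolved through Go modules rather than the vendor tree are editorial, not part of this patch.

package main

import (
	"fmt"

	"github.com/cespare/xxhash"
)

func main() {
	data := []byte("example payload")

	// One-shot hash of a byte slice.
	fmt.Println(xxhash.Sum64(data))

	// Hash of a string; the unsafe-backed path avoids a []byte copy on most builds.
	fmt.Println(xxhash.Sum64String("example payload"))

	// Streaming hash: New returns a hash.Hash64 that buffers partial 32-byte blocks.
	d := xxhash.New()
	d.Write(data)
	fmt.Println(d.Sum64())
}
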
- -The MIT License (MIT) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -=============================================================================== - -Go Language Copyright & License - - -Copyright 2009 The Go Authors. All rights reserved. -Use of this source code is governed by a BSD-style -license that can be found in the LICENSE file. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/clbanning/mxj/anyxml.go b/vendor/github.com/clbanning/mxj/anyxml.go deleted file mode 100644 index ec2f3dfddac..00000000000 --- a/vendor/github.com/clbanning/mxj/anyxml.go +++ /dev/null @@ -1,189 +0,0 @@ -package mxj - -import ( - "encoding/xml" - "reflect" -) - -const ( - DefaultElementTag = "element" -) - -// Encode arbitrary value as XML. -// -// Note: unmarshaling the resultant -// XML may not return the original value, since tag labels may have been injected -// to create the XML representation of the value. -/* - Encode an arbitrary JSON object. 
- package main - - import ( - "encoding/json" - "fmt" - "github.com/clbanning/mxj" - ) - - func main() { - jsondata := []byte(`[ - { "somekey":"somevalue" }, - "string", - 3.14159265, - true - ]`) - var i interface{} - err := json.Unmarshal(jsondata, &i) - if err != nil { - // do something - } - x, err := mxj.AnyXmlIndent(i, "", " ", "mydoc") - if err != nil { - // do something else - } - fmt.Println(string(x)) - } - - output: - - somevalue - string - 3.14159265 - true - -*/ -// Alternative values for DefaultRootTag and DefaultElementTag can be set as: -// AnyXml( v, myRootTag, myElementTag). -func AnyXml(v interface{}, tags ...string) ([]byte, error) { - var rt, et string - if len(tags) == 1 || len(tags) == 2 { - rt = tags[0] - } else { - rt = DefaultRootTag - } - if len(tags) == 2 { - et = tags[1] - } else { - et = DefaultElementTag - } - - if v == nil { - if useGoXmlEmptyElemSyntax { - return []byte("<" + rt + ">"), nil - } - return []byte("<" + rt + "/>"), nil - } - if reflect.TypeOf(v).Kind() == reflect.Struct { - return xml.Marshal(v) - } - - var err error - s := new(string) - p := new(pretty) - - var ss string - var b []byte - switch v.(type) { - case []interface{}: - ss = "<" + rt + ">" - for _, vv := range v.([]interface{}) { - switch vv.(type) { - case map[string]interface{}: - m := vv.(map[string]interface{}) - if len(m) == 1 { - for tag, val := range m { - err = mapToXmlIndent(false, s, tag, val, p) - } - } else { - err = mapToXmlIndent(false, s, et, vv, p) - } - default: - err = mapToXmlIndent(false, s, et, vv, p) - } - if err != nil { - break - } - } - ss += *s + "" - b = []byte(ss) - case map[string]interface{}: - m := Map(v.(map[string]interface{})) - b, err = m.Xml(rt) - default: - err = mapToXmlIndent(false, s, rt, v, p) - b = []byte(*s) - } - - return b, err -} - -// Encode an arbitrary value as a pretty XML string. -// Alternative values for DefaultRootTag and DefaultElementTag can be set as: -// AnyXmlIndent( v, "", " ", myRootTag, myElementTag). 
-func AnyXmlIndent(v interface{}, prefix, indent string, tags ...string) ([]byte, error) { - var rt, et string - if len(tags) == 1 || len(tags) == 2 { - rt = tags[0] - } else { - rt = DefaultRootTag - } - if len(tags) == 2 { - et = tags[1] - } else { - et = DefaultElementTag - } - - if v == nil { - if useGoXmlEmptyElemSyntax { - return []byte(prefix + "<" + rt + ">"), nil - } - return []byte(prefix + "<" + rt + "/>"), nil - } - if reflect.TypeOf(v).Kind() == reflect.Struct { - return xml.MarshalIndent(v, prefix, indent) - } - - var err error - s := new(string) - p := new(pretty) - p.indent = indent - p.padding = prefix - - var ss string - var b []byte - switch v.(type) { - case []interface{}: - ss = "<" + rt + ">\n" - p.Indent() - for _, vv := range v.([]interface{}) { - switch vv.(type) { - case map[string]interface{}: - m := vv.(map[string]interface{}) - if len(m) == 1 { - for tag, val := range m { - err = mapToXmlIndent(true, s, tag, val, p) - } - } else { - p.start = 1 // we 1 tag in - err = mapToXmlIndent(true, s, et, vv, p) - *s += "\n" - } - default: - p.start = 0 // in case trailing p.start = 1 - err = mapToXmlIndent(true, s, et, vv, p) - } - if err != nil { - break - } - } - ss += *s + "" - b = []byte(ss) - case map[string]interface{}: - m := Map(v.(map[string]interface{})) - b, err = m.XmlIndent(prefix, indent, rt) - default: - err = mapToXmlIndent(true, s, rt, v, p) - b = []byte(*s) - } - - return b, err -} diff --git a/vendor/github.com/clbanning/mxj/atomFeedString.xml b/vendor/github.com/clbanning/mxj/atomFeedString.xml deleted file mode 100644 index 474575a41ca..00000000000 --- a/vendor/github.com/clbanning/mxj/atomFeedString.xml +++ /dev/null @@ -1,54 +0,0 @@ - -Code Review - My issueshttp://codereview.appspot.com/rietveld<>rietveld: an attempt at pubsubhubbub -2009-10-04T01:35:58+00:00email-address-removedurn:md5:134d9179c41f806be79b3a5f7877d19a - An attempt at adding pubsubhubbub support to Rietveld. -http://code.google.com/p/pubsubhubbub -http://code.google.com/p/rietveld/issues/detail?id=155 - -The server side of the protocol is trivial: - 1. add a &lt;link rel=&quot;hub&quot; href=&quot;hub-server&quot;&gt; tag to all - feeds that will be pubsubhubbubbed. - 2. every time one of those feeds changes, tell the hub - with a simple POST request. - -I have tested this by adding debug prints to a local hub -server and checking that the server got the right publish -requests. - -I can&#39;t quite get the server to work, but I think the bug -is not in my code. I think that the server expects to be -able to grab the feed and see the feed&#39;s actual URL in -the link rel=&quot;self&quot;, but the default value for that drops -the :port from the URL, and I cannot for the life of me -figure out how to get the Atom generator deep inside -django not to do that, or even where it is doing that, -or even what code is running to generate the Atom feed. -(I thought I knew but I added some assert False statements -and it kept running!) - -Ignoring that particular problem, I would appreciate -feedback on the right way to get the two values at -the top of feeds.py marked NOTE(rsc). - - -rietveld: correct tab handling -2009-10-03T23:02:17+00:00email-address-removedurn:md5:0a2a4f19bb815101f0ba2904aed7c35a - This fixes the buggy tab rendering that can be seen at -http://codereview.appspot.com/116075/diff/1/2 - -The fundamental problem was that the tab code was -not being told what column the text began in, so it -didn&#39;t know where to put the tab stops. 
Another problem -was that some of the code assumed that string byte -offsets were the same as column offsets, which is only -true if there are no tabs. - -In the process of fixing this, I cleaned up the arguments -to Fold and ExpandTabs and renamed them Break and -_ExpandTabs so that I could be sure that I found all the -call sites. I also wanted to verify that ExpandTabs was -not being used from outside intra_region_diff.py. - - - ` - diff --git a/vendor/github.com/clbanning/mxj/doc.go b/vendor/github.com/clbanning/mxj/doc.go deleted file mode 100644 index 8ed79a5a77a..00000000000 --- a/vendor/github.com/clbanning/mxj/doc.go +++ /dev/null @@ -1,134 +0,0 @@ -// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. -// Copyright 2012-2015, 2018 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -/* -Marshal/Unmarshal XML to/from map[string]interface{} values (and JSON); extract/modify values from maps by key or key-path, including wildcards. - -mxj supplants the legacy x2j and j2x packages. The subpackage x2j-wrapper is provided to facilitate migrating from the x2j package. The x2j and j2x subpackages provide similar functionality of the old packages but are not function-name compatible with them. - -Note: this library was designed for processing ad hoc anonymous messages. Bulk processing large data sets may be much more efficiently performed using the encoding/xml or encoding/json packages from Go's standard library directly. - -Related Packages: - checkxml: github.com/clbanning/checkxml provides functions for validating XML data. - -Notes: - 2018.04.18: mv.Xml/mv.XmlIndent encodes non-map[string]interface{} map values - map[string]string, map[int]uint, etc. - 2018.03.29: mv.Gob/NewMapGob support gob encoding/decoding of Maps. - 2018.03.26: Added mxj/x2j-wrapper sub-package for migrating from legacy x2j package. - 2017.02.22: LeafNode paths can use ".N" syntax rather than "[N]" for list member indexing. - 2017.02.21: github.com/clbanning/checkxml provides functions for validating XML data. - 2017.02.10: SetFieldSeparator changes field separator for args in UpdateValuesForPath, ValuesFor... methods. - 2017.02.06: Support XMPP stream processing - HandleXMPPStreamTag(). - 2016.11.07: Preserve name space prefix syntax in XmlSeq parser - NewMapXmlSeq(), etc. - 2016.06.25: Support overriding default XML attribute prefix, "-", in Map keys - SetAttrPrefix(). - 2016.05.26: Support customization of xml.Decoder by exposing CustomDecoder variable. - 2016.03.19: Escape invalid chars when encoding XML attribute and element values - XMLEscapeChars(). - 2016.03.02: By default decoding XML with float64 and bool value casting will not cast "NaN", "Inf", and "-Inf". - To cast them to float64, first set flag with CastNanInf(true). - 2016.02.22: New mv.Root(), mv.Elements(), mv.Attributes methods let you examine XML document structure. - 2016.02.16: Add CoerceKeysToLower() option to handle tags with mixed capitalization. - 2016.02.12: Seek for first xml.StartElement token; only return error if io.EOF is reached first (handles BOM). - 2015-12-02: NewMapXmlSeq() with mv.XmlSeq() & co. will try to preserve structure of XML doc when re-encoding. - 2014-08-02: AnyXml() and AnyXmlIndent() will try to marshal arbitrary values to XML. 
- -SUMMARY - - type Map map[string]interface{} - - Create a Map value, 'mv', from any map[string]interface{} value, 'v': - mv := Map(v) - - Unmarshal / marshal XML as a Map value, 'mv': - mv, err := NewMapXml(xmlValue) // unmarshal - xmlValue, err := mv.Xml() // marshal - - Unmarshal XML from an io.Reader as a Map value, 'mv': - mv, err := NewMapXmlReader(xmlReader) // repeated calls, as with an os.File Reader, will process stream - mv, raw, err := NewMapXmlReaderRaw(xmlReader) // 'raw' is the raw XML that was decoded - - Marshal Map value, 'mv', to an XML Writer (io.Writer): - err := mv.XmlWriter(xmlWriter) - raw, err := mv.XmlWriterRaw(xmlWriter) // 'raw' is the raw XML that was written on xmlWriter - - Also, for prettified output: - xmlValue, err := mv.XmlIndent(prefix, indent, ...) - err := mv.XmlIndentWriter(xmlWriter, prefix, indent, ...) - raw, err := mv.XmlIndentWriterRaw(xmlWriter, prefix, indent, ...) - - Bulk process XML with error handling (note: handlers must return a boolean value): - err := HandleXmlReader(xmlReader, mapHandler(Map), errHandler(error)) - err := HandleXmlReaderRaw(xmlReader, mapHandler(Map, []byte), errHandler(error, []byte)) - - Converting XML to JSON: see Examples for NewMapXml and HandleXmlReader. - - There are comparable functions and methods for JSON processing. - - Arbitrary structure values can be decoded to / encoded from Map values: - mv, err := NewMapStruct(structVal) - err := mv.Struct(structPointer) - - To work with XML tag values, JSON or Map key values or structure field values, decode the XML, JSON - or structure to a Map value, 'mv', or cast a map[string]interface{} value to a Map value, 'mv', then: - paths := mv.PathsForKey(key) - path := mv.PathForKeyShortest(key) - values, err := mv.ValuesForKey(key, subkeys) - values, err := mv.ValuesForPath(path, subkeys) // 'path' can be dot-notation with wildcards and indexed arrays. - count, err := mv.UpdateValuesForPath(newVal, path, subkeys) - - Get everything at once, irrespective of path depth: - leafnodes := mv.LeafNodes() - leafvalues := mv.LeafValues() - - A new Map with whatever keys are desired can be created from the current Map and then encoded in XML - or JSON. (Note: keys can use dot-notation. 'oldKey' can also use wildcards and indexed arrays.) - newMap, err := mv.NewMap("oldKey_1:newKey_1", "oldKey_2:newKey_2", ..., "oldKey_N:newKey_N") - newMap, err := mv.NewMap("oldKey1", "oldKey3", "oldKey5") // a subset of 'mv'; see "examples/partial.go" - newXml, err := newMap.Xml() // for example - newJson, err := newMap.Json() // ditto - -XML PARSING CONVENTIONS - - Using NewMapXml() - - - Attributes are parsed to `map[string]interface{}` values by prefixing a hyphen, `-`, - to the attribute label. (Unless overridden by `PrependAttrWithHyphen(false)` or - `SetAttrPrefix()`.) - - If the element is a simple element and has attributes, the element value - is given the key `#text` for its `map[string]interface{}` representation. (See - the 'atomFeedString.xml' test data, below.) - - XML comments, directives, and process instructions are ignored. - - If CoerceKeysToLower() has been called, then the resultant keys will be lower case. - - Using NewMapXmlSeq() - - - Attributes are parsed to `map["#attr"]map[]map[string]interface{}`values - where the `` value has "#text" and "#seq" keys - the "#text" key holds the - value for ``. - - All elements, except for the root, have a "#seq" key. 
- - Comments, directives, and process instructions are unmarshalled into the Map using the - keys "#comment", "#directive", and "#procinst", respectively. (See documentation for more - specifics.) - - Name space syntax is preserved: - - something parses to map["ns:key"]interface{}{"something"} - - xmlns:ns="http://myns.com/ns" parses to map["xmlns:ns"]interface{}{"http://myns.com/ns"} - - Both - - - By default, "Nan", "Inf", and "-Inf" values are not cast to float64. If you want them - to be cast, set a flag to cast them using CastNanInf(true). - -XML ENCODING CONVENTIONS - - - 'nil' Map values, which may represent 'null' JSON values, are encoded as "". - NOTE: the operation is not symmetric as "" elements are decoded as 'tag:""' Map values, - which, then, encode in JSON as '"tag":""' values.. - - ALSO: there is no guarantee that the encoded XML doc will be the same as the decoded one. (Go - randomizes the walk through map[string]interface{} values.) If you plan to re-encode the - Map value to XML and want the same sequencing of elements look at NewMapXmlSeq() and - mv.XmlSeq() - these try to preserve the element sequencing but with added complexity when - working with the Map representation. - -*/ -package mxj diff --git a/vendor/github.com/clbanning/mxj/escapechars.go b/vendor/github.com/clbanning/mxj/escapechars.go deleted file mode 100644 index bee0442c9c1..00000000000 --- a/vendor/github.com/clbanning/mxj/escapechars.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2016 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -package mxj - -import ( - "bytes" -) - -var xmlEscapeChars bool - -// XMLEscapeChars(true) forces escaping invalid characters in attribute and element values. -// NOTE: this is brute force with NO interrogation of '&' being escaped already; if it is -// then '&' will be re-escaped as '&amp;'. -// -/* - The values are: - " " - ' ' - < < - > > - & & -*/ -func XMLEscapeChars(b bool) { - xmlEscapeChars = b -} - -// Scan for '&' first, since 's' may contain "&" that is parsed to "&amp;" -// - or "<" that is parsed to "&lt;". -var escapechars = [][2][]byte{ - {[]byte(`&`), []byte(`&`)}, - {[]byte(`<`), []byte(`<`)}, - {[]byte(`>`), []byte(`>`)}, - {[]byte(`"`), []byte(`"`)}, - {[]byte(`'`), []byte(`'`)}, -} - -func escapeChars(s string) string { - if len(s) == 0 { - return s - } - - b := []byte(s) - for _, v := range escapechars { - n := bytes.Count(b, v[0]) - if n == 0 { - continue - } - b = bytes.Replace(b, v[0], v[1], n) - } - return string(b) -} - diff --git a/vendor/github.com/clbanning/mxj/exists.go b/vendor/github.com/clbanning/mxj/exists.go deleted file mode 100644 index 2fb3084b599..00000000000 --- a/vendor/github.com/clbanning/mxj/exists.go +++ /dev/null @@ -1,7 +0,0 @@ -package mxj - -// Checks whether the path exists -func (mv Map) Exists(path string, subkeys ...string) bool { - v, err := mv.ValuesForPath(path, subkeys...) - return err == nil && len(v) > 0 -} diff --git a/vendor/github.com/clbanning/mxj/files.go b/vendor/github.com/clbanning/mxj/files.go deleted file mode 100644 index 27e06e1e801..00000000000 --- a/vendor/github.com/clbanning/mxj/files.go +++ /dev/null @@ -1,287 +0,0 @@ -package mxj - -import ( - "fmt" - "io" - "os" -) - -type Maps []Map - -func NewMaps() Maps { - return make(Maps, 0) -} - -type MapRaw struct { - M Map - R []byte -} - -// NewMapsFromXmlFile - creates an array from a file of JSON values. 
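
The doc.go removed above summarises the core mxj workflow: decode XML into a Map (a map[string]interface{}), query or reshape it, and re-encode it as XML or JSON. A hedged sketch of that round trip follows; NewMapXml and Map.Json are taken from the removed sources (Map.Json appears in the deleted json.go further on), while the sample document and the expected-output comment are illustrative assumptions only.

package main

import (
	"fmt"

	"github.com/clbanning/mxj"
)

func main() {
	xmlDoc := []byte(`<doc><name>tyk</name><port>8080</port></doc>`)

	// Decode the XML document into a Map value.
	mv, err := mxj.NewMapXml(xmlDoc)
	if err != nil {
		panic(err)
	}

	// Re-encode the same Map as JSON.
	j, err := mv.Json()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(j)) // e.g. {"doc":{"name":"tyk","port":"8080"}}
}
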
-func NewMapsFromJsonFile(name string) (Maps, error) { - fi, err := os.Stat(name) - if err != nil { - return nil, err - } - if !fi.Mode().IsRegular() { - return nil, fmt.Errorf("file %s is not a regular file", name) - } - - fh, err := os.Open(name) - if err != nil { - return nil, err - } - defer fh.Close() - - am := make([]Map, 0) - for { - m, raw, err := NewMapJsonReaderRaw(fh) - if err != nil && err != io.EOF { - return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(raw)) - } - if len(m) > 0 { - am = append(am, m) - } - if err == io.EOF { - break - } - } - return am, nil -} - -// ReadMapsFromJsonFileRaw - creates an array of MapRaw from a file of JSON values. -func NewMapsFromJsonFileRaw(name string) ([]MapRaw, error) { - fi, err := os.Stat(name) - if err != nil { - return nil, err - } - if !fi.Mode().IsRegular() { - return nil, fmt.Errorf("file %s is not a regular file", name) - } - - fh, err := os.Open(name) - if err != nil { - return nil, err - } - defer fh.Close() - - am := make([]MapRaw, 0) - for { - mr := new(MapRaw) - mr.M, mr.R, err = NewMapJsonReaderRaw(fh) - if err != nil && err != io.EOF { - return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(mr.R)) - } - if len(mr.M) > 0 { - am = append(am, *mr) - } - if err == io.EOF { - break - } - } - return am, nil -} - -// NewMapsFromXmlFile - creates an array from a file of XML values. -func NewMapsFromXmlFile(name string) (Maps, error) { - fi, err := os.Stat(name) - if err != nil { - return nil, err - } - if !fi.Mode().IsRegular() { - return nil, fmt.Errorf("file %s is not a regular file", name) - } - - fh, err := os.Open(name) - if err != nil { - return nil, err - } - defer fh.Close() - - am := make([]Map, 0) - for { - m, raw, err := NewMapXmlReaderRaw(fh) - if err != nil && err != io.EOF { - return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(raw)) - } - if len(m) > 0 { - am = append(am, m) - } - if err == io.EOF { - break - } - } - return am, nil -} - -// NewMapsFromXmlFileRaw - creates an array of MapRaw from a file of XML values. -// NOTE: the slice with the raw XML is clean with no extra capacity - unlike NewMapXmlReaderRaw(). -// It is slow at parsing a file from disk and is intended for relatively small utility files. -func NewMapsFromXmlFileRaw(name string) ([]MapRaw, error) { - fi, err := os.Stat(name) - if err != nil { - return nil, err - } - if !fi.Mode().IsRegular() { - return nil, fmt.Errorf("file %s is not a regular file", name) - } - - fh, err := os.Open(name) - if err != nil { - return nil, err - } - defer fh.Close() - - am := make([]MapRaw, 0) - for { - mr := new(MapRaw) - mr.M, mr.R, err = NewMapXmlReaderRaw(fh) - if err != nil && err != io.EOF { - return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(mr.R)) - } - if len(mr.M) > 0 { - am = append(am, *mr) - } - if err == io.EOF { - break - } - } - return am, nil -} - -// ------------------------ Maps writing ------------------------- -// These are handy-dandy methods for dumping configuration data, etc. 
- -// JsonString - analogous to mv.Json() -func (mvs Maps) JsonString(safeEncoding ...bool) (string, error) { - var s string - for _, v := range mvs { - j, err := v.Json() - if err != nil { - return s, err - } - s += string(j) - } - return s, nil -} - -// JsonStringIndent - analogous to mv.JsonIndent() -func (mvs Maps) JsonStringIndent(prefix, indent string, safeEncoding ...bool) (string, error) { - var s string - var haveFirst bool - for _, v := range mvs { - j, err := v.JsonIndent(prefix, indent) - if err != nil { - return s, err - } - if haveFirst { - s += "\n" - } else { - haveFirst = true - } - s += string(j) - } - return s, nil -} - -// XmlString - analogous to mv.Xml() -func (mvs Maps) XmlString() (string, error) { - var s string - for _, v := range mvs { - x, err := v.Xml() - if err != nil { - return s, err - } - s += string(x) - } - return s, nil -} - -// XmlStringIndent - analogous to mv.XmlIndent() -func (mvs Maps) XmlStringIndent(prefix, indent string) (string, error) { - var s string - for _, v := range mvs { - x, err := v.XmlIndent(prefix, indent) - if err != nil { - return s, err - } - s += string(x) - } - return s, nil -} - -// JsonFile - write Maps to named file as JSON -// Note: the file will be created, if necessary; if it exists it will be truncated. -// If you need to append to a file, open it and use JsonWriter method. -func (mvs Maps) JsonFile(file string, safeEncoding ...bool) error { - var encoding bool - if len(safeEncoding) == 1 { - encoding = safeEncoding[0] - } - s, err := mvs.JsonString(encoding) - if err != nil { - return err - } - fh, err := os.Create(file) - if err != nil { - return err - } - defer fh.Close() - fh.WriteString(s) - return nil -} - -// JsonFileIndent - write Maps to named file as pretty JSON -// Note: the file will be created, if necessary; if it exists it will be truncated. -// If you need to append to a file, open it and use JsonIndentWriter method. -func (mvs Maps) JsonFileIndent(file, prefix, indent string, safeEncoding ...bool) error { - var encoding bool - if len(safeEncoding) == 1 { - encoding = safeEncoding[0] - } - s, err := mvs.JsonStringIndent(prefix, indent, encoding) - if err != nil { - return err - } - fh, err := os.Create(file) - if err != nil { - return err - } - defer fh.Close() - fh.WriteString(s) - return nil -} - -// XmlFile - write Maps to named file as XML -// Note: the file will be created, if necessary; if it exists it will be truncated. -// If you need to append to a file, open it and use XmlWriter method. -func (mvs Maps) XmlFile(file string) error { - s, err := mvs.XmlString() - if err != nil { - return err - } - fh, err := os.Create(file) - if err != nil { - return err - } - defer fh.Close() - fh.WriteString(s) - return nil -} - -// XmlFileIndent - write Maps to named file as pretty XML -// Note: the file will be created,if necessary; if it exists it will be truncated. -// If you need to append to a file, open it and use XmlIndentWriter method. 
-func (mvs Maps) XmlFileIndent(file, prefix, indent string) error { - s, err := mvs.XmlStringIndent(prefix, indent) - if err != nil { - return err - } - fh, err := os.Create(file) - if err != nil { - return err - } - defer fh.Close() - fh.WriteString(s) - return nil -} diff --git a/vendor/github.com/clbanning/mxj/files_test.badjson b/vendor/github.com/clbanning/mxj/files_test.badjson deleted file mode 100644 index d18720044ac..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test.badjson +++ /dev/null @@ -1,2 +0,0 @@ -{ "this":"is", "a":"test", "file":"for", "files_test.go":"case" } -{ "with":"some", "bad":JSON, "in":"it" } diff --git a/vendor/github.com/clbanning/mxj/files_test.badxml b/vendor/github.com/clbanning/mxj/files_test.badxml deleted file mode 100644 index 4736ef973dd..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test.badxml +++ /dev/null @@ -1,9 +0,0 @@ - - test - for files.go - - - some - doc - test case - diff --git a/vendor/github.com/clbanning/mxj/files_test.json b/vendor/github.com/clbanning/mxj/files_test.json deleted file mode 100644 index e9a3ddf40ec..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test.json +++ /dev/null @@ -1,2 +0,0 @@ -{ "this":"is", "a":"test", "file":"for", "files_test.go":"case" } -{ "with":"just", "two":2, "JSON":"values", "true":true } diff --git a/vendor/github.com/clbanning/mxj/files_test.xml b/vendor/github.com/clbanning/mxj/files_test.xml deleted file mode 100644 index 65cf021fb70..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test.xml +++ /dev/null @@ -1,9 +0,0 @@ - - test - for files.go - - - some - doc - test case - diff --git a/vendor/github.com/clbanning/mxj/files_test_dup.json b/vendor/github.com/clbanning/mxj/files_test_dup.json deleted file mode 100644 index 2becb6a4512..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test_dup.json +++ /dev/null @@ -1 +0,0 @@ -{"a":"test","file":"for","files_test.go":"case","this":"is"}{"JSON":"values","true":true,"two":2,"with":"just"} \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/files_test_dup.xml b/vendor/github.com/clbanning/mxj/files_test_dup.xml deleted file mode 100644 index f68d22e28ea..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test_dup.xml +++ /dev/null @@ -1 +0,0 @@ -for files.gotestdoctest casesome \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/files_test_indent.json b/vendor/github.com/clbanning/mxj/files_test_indent.json deleted file mode 100644 index 6fde15634df..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test_indent.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "a": "test", - "file": "for", - "files_test.go": "case", - "this": "is" -} -{ - "JSON": "values", - "true": true, - "two": 2, - "with": "just" -} \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/files_test_indent.xml b/vendor/github.com/clbanning/mxj/files_test_indent.xml deleted file mode 100644 index 8c91a1dc20a..00000000000 --- a/vendor/github.com/clbanning/mxj/files_test_indent.xml +++ /dev/null @@ -1,8 +0,0 @@ - - for files.go - test - - doc - test case - some - \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/gob.go b/vendor/github.com/clbanning/mxj/gob.go deleted file mode 100644 index d56c2fd6fe8..00000000000 --- a/vendor/github.com/clbanning/mxj/gob.go +++ /dev/null @@ -1,35 +0,0 @@ -// gob.go - Encode/Decode a Map into a gob object. 
- -package mxj - -import ( - "bytes" - "encoding/gob" -) - -// NewMapGob returns a Map value for a gob object that has been -// encoded from a map[string]interface{} (or compatible type) value. -// It is intended to provide symmetric handling of Maps that have -// been encoded using mv.Gob. -func NewMapGob(gobj []byte) (Map, error) { - m := make(map[string]interface{}, 0) - if len(gobj) == 0 { - return m, nil - } - r := bytes.NewReader(gobj) - dec := gob.NewDecoder(r) - if err := dec.Decode(&m); err != nil { - return m, err - } - return m, nil -} - -// Gob returns a gob-encoded value for the Map 'mv'. -func (mv Map) Gob() ([]byte, error) { - var buf bytes.Buffer - enc := gob.NewEncoder(&buf) - if err := enc.Encode(map[string]interface{}(mv)); err != nil { - return nil, err - } - return buf.Bytes(), nil -} diff --git a/vendor/github.com/clbanning/mxj/json.go b/vendor/github.com/clbanning/mxj/json.go deleted file mode 100644 index eb2c05a1869..00000000000 --- a/vendor/github.com/clbanning/mxj/json.go +++ /dev/null @@ -1,323 +0,0 @@ -// Copyright 2012-2014 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -package mxj - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "time" -) - -// ------------------------------ write JSON ----------------------- - -// Just a wrapper on json.Marshal. -// If option safeEncoding is'true' then safe encoding of '<', '>' and '&' -// is preserved. (see encoding/json#Marshal, encoding/json#Encode) -func (mv Map) Json(safeEncoding ...bool) ([]byte, error) { - var s bool - if len(safeEncoding) == 1 { - s = safeEncoding[0] - } - - b, err := json.Marshal(mv) - - if !s { - b = bytes.Replace(b, []byte("\\u003c"), []byte("<"), -1) - b = bytes.Replace(b, []byte("\\u003e"), []byte(">"), -1) - b = bytes.Replace(b, []byte("\\u0026"), []byte("&"), -1) - } - return b, err -} - -// Just a wrapper on json.MarshalIndent. -// If option safeEncoding is'true' then safe encoding of '<' , '>' and '&' -// is preserved. (see encoding/json#Marshal, encoding/json#Encode) -func (mv Map) JsonIndent(prefix, indent string, safeEncoding ...bool) ([]byte, error) { - var s bool - if len(safeEncoding) == 1 { - s = safeEncoding[0] - } - - b, err := json.MarshalIndent(mv, prefix, indent) - if !s { - b = bytes.Replace(b, []byte("\\u003c"), []byte("<"), -1) - b = bytes.Replace(b, []byte("\\u003e"), []byte(">"), -1) - b = bytes.Replace(b, []byte("\\u0026"), []byte("&"), -1) - } - return b, err -} - -// The following implementation is provided for symmetry with NewMapJsonReader[Raw] -// The names will also provide a key for the number of return arguments. - -// Writes the Map as JSON on the Writer. -// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. -func (mv Map) JsonWriter(jsonWriter io.Writer, safeEncoding ...bool) error { - b, err := mv.Json(safeEncoding...) - if err != nil { - return err - } - - _, err = jsonWriter.Write(b) - return err -} - -// Writes the Map as JSON on the Writer. []byte is the raw JSON that was written. -// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. -func (mv Map) JsonWriterRaw(jsonWriter io.Writer, safeEncoding ...bool) ([]byte, error) { - b, err := mv.Json(safeEncoding...) - if err != nil { - return b, err - } - - _, err = jsonWriter.Write(b) - return b, err -} - -// Writes the Map as pretty JSON on the Writer. -// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. 
-func (mv Map) JsonIndentWriter(jsonWriter io.Writer, prefix, indent string, safeEncoding ...bool) error { - b, err := mv.JsonIndent(prefix, indent, safeEncoding...) - if err != nil { - return err - } - - _, err = jsonWriter.Write(b) - return err -} - -// Writes the Map as pretty JSON on the Writer. []byte is the raw JSON that was written. -// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. -func (mv Map) JsonIndentWriterRaw(jsonWriter io.Writer, prefix, indent string, safeEncoding ...bool) ([]byte, error) { - b, err := mv.JsonIndent(prefix, indent, safeEncoding...) - if err != nil { - return b, err - } - - _, err = jsonWriter.Write(b) - return b, err -} - -// --------------------------- read JSON ----------------------------- - -// Decode numericvalues as json.Number type Map values - see encoding/json#Number. -// NOTE: this is for decoding JSON into a Map with NewMapJson(), NewMapJsonReader(), -// etc.; it does not affect NewMapXml(), etc. The XML encoders mv.Xml() and mv.XmlIndent() -// do recognize json.Number types; a JSON object can be decoded to a Map with json.Number -// value types and the resulting Map can be correctly encoded into a XML object. -var JsonUseNumber bool - -// Just a wrapper on json.Unmarshal -// Converting JSON to XML is a simple as: -// ... -// mapVal, merr := mxj.NewMapJson(jsonVal) -// if merr != nil { -// // handle error -// } -// xmlVal, xerr := mapVal.Xml() -// if xerr != nil { -// // handle error -// } -// NOTE: as a special case, passing a list, e.g., [{"some-null-value":"", "a-non-null-value":"bar"}], -// will be interpreted as having the root key 'object' prepended - {"object":[ ... ]} - to unmarshal to a Map. -// See mxj/j2x/j2x_test.go. -func NewMapJson(jsonVal []byte) (Map, error) { - // empty or nil begets empty - if len(jsonVal) == 0 { - m := make(map[string]interface{}, 0) - return m, nil - } - // handle a goofy case ... - if jsonVal[0] == '[' { - jsonVal = []byte(`{"object":` + string(jsonVal) + `}`) - } - m := make(map[string]interface{}) - // err := json.Unmarshal(jsonVal, &m) - buf := bytes.NewReader(jsonVal) - dec := json.NewDecoder(buf) - if JsonUseNumber { - dec.UseNumber() - } - err := dec.Decode(&m) - return m, err -} - -// Retrieve a Map value from an io.Reader. -// NOTE: The raw JSON off the reader is buffered to []byte using a ByteReader. If the io.Reader is an -// os.File, there may be significant performance impact. If the io.Reader is wrapping a []byte -// value in-memory, however, such as http.Request.Body you CAN use it to efficiently unmarshal -// a JSON object. -func NewMapJsonReader(jsonReader io.Reader) (Map, error) { - jb, err := getJson(jsonReader) - if err != nil || len(*jb) == 0 { - return nil, err - } - - // Unmarshal the 'presumed' JSON string - return NewMapJson(*jb) -} - -// Retrieve a Map value and raw JSON - []byte - from an io.Reader. -// NOTE: The raw JSON off the reader is buffered to []byte using a ByteReader. If the io.Reader is an -// os.File, there may be significant performance impact. If the io.Reader is wrapping a []byte -// value in-memory, however, such as http.Request.Body you CAN use it to efficiently unmarshal -// a JSON object and retrieve the raw JSON in a single call. 
-func NewMapJsonReaderRaw(jsonReader io.Reader) (Map, []byte, error) { - jb, err := getJson(jsonReader) - if err != nil || len(*jb) == 0 { - return nil, *jb, err - } - - // Unmarshal the 'presumed' JSON string - m, merr := NewMapJson(*jb) - return m, *jb, merr -} - -// Pull the next JSON string off the stream: just read from first '{' to its closing '}'. -// Returning a pointer to the slice saves 16 bytes - maybe unnecessary, but internal to package. -func getJson(rdr io.Reader) (*[]byte, error) { - bval := make([]byte, 1) - jb := make([]byte, 0) - var inQuote, inJson bool - var parenCnt int - var previous byte - - // scan the input for a matched set of {...} - // json.Unmarshal will handle syntax checking. - for { - _, err := rdr.Read(bval) - if err != nil { - if err == io.EOF && inJson && parenCnt > 0 { - return &jb, fmt.Errorf("no closing } for JSON string: %s", string(jb)) - } - return &jb, err - } - switch bval[0] { - case '{': - if !inQuote { - parenCnt++ - inJson = true - } - case '}': - if !inQuote { - parenCnt-- - } - if parenCnt < 0 { - return nil, fmt.Errorf("closing } without opening {: %s", string(jb)) - } - case '"': - if inQuote { - if previous == '\\' { - break - } - inQuote = false - } else { - inQuote = true - } - case '\n', '\r', '\t', ' ': - if !inQuote { - continue - } - } - if inJson { - jb = append(jb, bval[0]) - if parenCnt == 0 { - break - } - } - previous = bval[0] - } - - return &jb, nil -} - -// ------------------------------- JSON Reader handler via Map values ----------------------- - -// Default poll delay to keep Handler from spinning on an open stream -// like sitting on os.Stdin waiting for imput. -var jhandlerPollInterval = time.Duration(1e6) - -// While unnecessary, we make HandleJsonReader() have the same signature as HandleXmlReader(). -// This avoids treating one or other as a special case and discussing the underlying stdlib logic. - -// Bulk process JSON using handlers that process a Map value. -// 'rdr' is an io.Reader for the JSON (stream). -// 'mapHandler' is the Map processing handler. Return of 'false' stops io.Reader processing. -// 'errHandler' is the error processor. Return of 'false' stops io.Reader processing and returns the error. -// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. -// This means that you can stop reading the file on error or after processing a particular message. -// To have reading and handling run concurrently, pass argument to a go routine in handler and return 'true'. -func HandleJsonReader(jsonReader io.Reader, mapHandler func(Map) bool, errHandler func(error) bool) error { - var n int - for { - m, merr := NewMapJsonReader(jsonReader) - n++ - - // handle error condition with errhandler - if merr != nil && merr != io.EOF { - merr = fmt.Errorf("[jsonReader: %d] %s", n, merr.Error()) - if ok := errHandler(merr); !ok { - // caused reader termination - return merr - } - continue - } - - // pass to maphandler - if len(m) != 0 { - if ok := mapHandler(m); !ok { - break - } - } else if merr != io.EOF { - <-time.After(jhandlerPollInterval) - } - - if merr == io.EOF { - break - } - } - return nil -} - -// Bulk process JSON using handlers that process a Map value and the raw JSON. -// 'rdr' is an io.Reader for the JSON (stream). -// 'mapHandler' is the Map and raw JSON - []byte - processor. Return of 'false' stops io.Reader processing. -// 'errHandler' is the error and raw JSON processor. Return of 'false' stops io.Reader processing and returns the error. 
-// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. -// This means that you can stop reading the file on error or after processing a particular message. -// To have reading and handling run concurrently, pass argument(s) to a go routine in handler and return 'true'. -func HandleJsonReaderRaw(jsonReader io.Reader, mapHandler func(Map, []byte) bool, errHandler func(error, []byte) bool) error { - var n int - for { - m, raw, merr := NewMapJsonReaderRaw(jsonReader) - n++ - - // handle error condition with errhandler - if merr != nil && merr != io.EOF { - merr = fmt.Errorf("[jsonReader: %d] %s", n, merr.Error()) - if ok := errHandler(merr, raw); !ok { - // caused reader termination - return merr - } - continue - } - - // pass to maphandler - if len(m) != 0 { - if ok := mapHandler(m, raw); !ok { - break - } - } else if merr != io.EOF { - <-time.After(jhandlerPollInterval) - } - - if merr == io.EOF { - break - } - } - return nil -} diff --git a/vendor/github.com/clbanning/mxj/keyvalues.go b/vendor/github.com/clbanning/mxj/keyvalues.go deleted file mode 100644 index 0b244c879ce..00000000000 --- a/vendor/github.com/clbanning/mxj/keyvalues.go +++ /dev/null @@ -1,671 +0,0 @@ -// Copyright 2012-2014 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// keyvalues.go: Extract values from an arbitrary XML doc. Tag path can include wildcard characters. - -package mxj - -import ( - "errors" - "fmt" - "strconv" - "strings" -) - -// ----------------------------- get everything FOR a single key ------------------------- - -const ( - minArraySize = 32 -) - -var defaultArraySize int = minArraySize - -// Adjust the buffers for expected number of values to return from ValuesForKey() and ValuesForPath(). -// This can have the effect of significantly reducing memory allocation-copy functions for large data sets. -// Returns the initial buffer size. -func SetArraySize(size int) int { - if size > minArraySize { - defaultArraySize = size - } else { - defaultArraySize = minArraySize - } - return defaultArraySize -} - -// Return all values in Map, 'mv', associated with a 'key'. If len(returned_values) == 0, then no match. -// On error, the returned slice is 'nil'. NOTE: 'key' can be wildcard, "*". -// 'subkeys' (optional) are "key:val[:type]" strings representing attributes or elements in a list. -// - By default 'val' is of type string. "key:val:bool" and "key:val:float" to coerce them. -// - For attributes prefix the label with a hyphen, '-', e.g., "-seq:3". -// - If the 'key' refers to a list, then "key:value" could select a list member of the list. -// - The subkey can be wildcarded - "key:*" - to require that it's there with some value. -// - If a subkey is preceeded with the '!' character, the key:value[:type] entry is treated as an -// exclusion critera - e.g., "!author:William T. Gaddis". -// - If val contains ":" symbol, use SetFieldSeparator to a unused symbol, perhaps "|". -func (mv Map) ValuesForKey(key string, subkeys ...string) ([]interface{}, error) { - m := map[string]interface{}(mv) - var subKeyMap map[string]interface{} - if len(subkeys) > 0 { - var err error - subKeyMap, err = getSubKeyMap(subkeys...) 
- if err != nil { - return nil, err - } - } - - ret := make([]interface{}, 0, defaultArraySize) - var cnt int - hasKey(m, key, &ret, &cnt, subKeyMap) - return ret[:cnt], nil -} - -var KeyNotExistError = errors.New("Key does not exist") - -// ValueForKey is a wrapper on ValuesForKey. It returns the first member of []interface{}, if any. -// If there is no value, "nil, nil" is returned. -func (mv Map) ValueForKey(key string, subkeys ...string) (interface{}, error) { - vals, err := mv.ValuesForKey(key, subkeys...) - if err != nil { - return nil, err - } - if len(vals) == 0 { - return nil, KeyNotExistError - } - return vals[0], nil -} - -// hasKey - if the map 'key' exists append it to array -// if it doesn't do nothing except scan array and map values -func hasKey(iv interface{}, key string, ret *[]interface{}, cnt *int, subkeys map[string]interface{}) { - // func hasKey(iv interface{}, key string, ret *[]interface{}, subkeys map[string]interface{}) { - switch iv.(type) { - case map[string]interface{}: - vv := iv.(map[string]interface{}) - // see if the current value is of interest - if v, ok := vv[key]; ok { - switch v.(type) { - case map[string]interface{}: - if hasSubKeys(v, subkeys) { - *ret = append(*ret, v) - *cnt++ - } - case []interface{}: - for _, av := range v.([]interface{}) { - if hasSubKeys(av, subkeys) { - *ret = append(*ret, av) - *cnt++ - } - } - default: - if len(subkeys) == 0 { - *ret = append(*ret, v) - *cnt++ - } - } - } - - // wildcard case - if key == "*" { - for _, v := range vv { - switch v.(type) { - case map[string]interface{}: - if hasSubKeys(v, subkeys) { - *ret = append(*ret, v) - *cnt++ - } - case []interface{}: - for _, av := range v.([]interface{}) { - if hasSubKeys(av, subkeys) { - *ret = append(*ret, av) - *cnt++ - } - } - default: - if len(subkeys) == 0 { - *ret = append(*ret, v) - *cnt++ - } - } - } - } - - // scan the rest - for _, v := range vv { - hasKey(v, key, ret, cnt, subkeys) - } - case []interface{}: - for _, v := range iv.([]interface{}) { - hasKey(v, key, ret, cnt, subkeys) - } - } -} - -// ----------------------- get everything for a node in the Map --------------------------- - -// Allow indexed arrays in "path" specification. (Request from Abhijit Kadam - abhijitk100@gmail.com.) -// 2014.04.28 - implementation note. -// Implemented as a wrapper of (old)ValuesForPath() because we need look-ahead logic to handle expansion -// of wildcards and unindexed arrays. Embedding such logic into valuesForKeyPath() would have made the -// code much more complicated; this wrapper is straightforward, easy to debug, and doesn't add significant overhead. - -// Retrieve all values for a path from the Map. If len(returned_values) == 0, then no match. -// On error, the returned array is 'nil'. -// 'path' is a dot-separated path of key values. -// - If a node in the path is '*', then everything beyond is walked. -// - 'path' can contain indexed array references, such as, "*.data[1]" and "msgs[2].data[0].field" - -// even "*[2].*[0].field". -// 'subkeys' (optional) are "key:val[:type]" strings representing attributes or elements in a list. -// - By default 'val' is of type string. "key:val:bool" and "key:val:float" to coerce them. -// - For attributes prefix the label with a hyphen, '-', e.g., "-seq:3". -// - If the 'path' refers to a list, then "tag:value" would return member of the list. -// - The subkey can be wildcarded - "key:*" - to require that it's there with some value. -// - If a subkey is preceeded with the '!' 
character, the key:value[:type] entry is treated as an -// exclusion critera - e.g., "!author:William T. Gaddis". -// - If val contains ":" symbol, use SetFieldSeparator to a unused symbol, perhaps "|". -func (mv Map) ValuesForPath(path string, subkeys ...string) ([]interface{}, error) { - // If there are no array indexes in path, use legacy ValuesForPath() logic. - if strings.Index(path, "[") < 0 { - return mv.oldValuesForPath(path, subkeys...) - } - - var subKeyMap map[string]interface{} - if len(subkeys) > 0 { - var err error - subKeyMap, err = getSubKeyMap(subkeys...) - if err != nil { - return nil, err - } - } - - keys, kerr := parsePath(path) - if kerr != nil { - return nil, kerr - } - - vals, verr := valuesForArray(keys, mv) - if verr != nil { - return nil, verr // Vals may be nil, but return empty array. - } - - // Need to handle subkeys ... only return members of vals that satisfy conditions. - retvals := make([]interface{}, 0) - for _, v := range vals { - if hasSubKeys(v, subKeyMap) { - retvals = append(retvals, v) - } - } - return retvals, nil -} - -func valuesForArray(keys []*key, m Map) ([]interface{}, error) { - var tmppath string - var haveFirst bool - var vals []interface{} - var verr error - - lastkey := len(keys) - 1 - for i := 0; i <= lastkey; i++ { - if !haveFirst { - tmppath = keys[i].name - haveFirst = true - } else { - tmppath += "." + keys[i].name - } - - // Look-ahead: explode wildcards and unindexed arrays. - // Need to handle un-indexed list recursively: - // e.g., path is "stuff.data[0]" rather than "stuff[0].data[0]". - // Need to treat it as "stuff[0].data[0]", "stuff[1].data[0]", ... - if !keys[i].isArray && i < lastkey && keys[i+1].isArray { - // Can't pass subkeys because we may not be at literal end of path. - vv, vverr := m.oldValuesForPath(tmppath) - if vverr != nil { - return nil, vverr - } - for _, v := range vv { - // See if we can walk the value. - am, ok := v.(map[string]interface{}) - if !ok { - continue - } - // Work the backend. - nvals, nvalserr := valuesForArray(keys[i+1:], Map(am)) - if nvalserr != nil { - return nil, nvalserr - } - vals = append(vals, nvals...) - } - break // have recursed the whole path - return - } - - if keys[i].isArray || i == lastkey { - // Don't pass subkeys because may not be at literal end of path. - vals, verr = m.oldValuesForPath(tmppath) - } else { - continue - } - if verr != nil { - return nil, verr - } - - if i == lastkey && !keys[i].isArray { - break - } - - // Now we're looking at an array - supposedly. - // Is index in range of vals? - if len(vals) <= keys[i].position { - vals = nil - break - } - - // Return the array member of interest, if at end of path. - if i == lastkey { - vals = vals[keys[i].position:(keys[i].position + 1)] - break - } - - // Extract the array member of interest. 
- am := vals[keys[i].position:(keys[i].position + 1)] - - // must be a map[string]interface{} value so we can keep walking the path - amm, ok := am[0].(map[string]interface{}) - if !ok { - vals = nil - break - } - - m = Map(amm) - haveFirst = false - } - - return vals, nil -} - -type key struct { - name string - isArray bool - position int -} - -func parsePath(s string) ([]*key, error) { - keys := strings.Split(s, ".") - - ret := make([]*key, 0) - - for i := 0; i < len(keys); i++ { - if keys[i] == "" { - continue - } - - newkey := new(key) - if strings.Index(keys[i], "[") < 0 { - newkey.name = keys[i] - ret = append(ret, newkey) - continue - } - - p := strings.Split(keys[i], "[") - newkey.name = p[0] - p = strings.Split(p[1], "]") - if p[0] == "" { // no right bracket - return nil, fmt.Errorf("no right bracket on key index: %s", keys[i]) - } - // convert p[0] to a int value - pos, nerr := strconv.ParseInt(p[0], 10, 32) - if nerr != nil { - return nil, fmt.Errorf("cannot convert index to int value: %s", p[0]) - } - newkey.position = int(pos) - newkey.isArray = true - ret = append(ret, newkey) - } - - return ret, nil -} - -// legacy ValuesForPath() - now wrapped to handle special case of indexed arrays in 'path'. -func (mv Map) oldValuesForPath(path string, subkeys ...string) ([]interface{}, error) { - m := map[string]interface{}(mv) - var subKeyMap map[string]interface{} - if len(subkeys) > 0 { - var err error - subKeyMap, err = getSubKeyMap(subkeys...) - if err != nil { - return nil, err - } - } - - keys := strings.Split(path, ".") - if keys[len(keys)-1] == "" { - keys = keys[:len(keys)-1] - } - ivals := make([]interface{}, 0, defaultArraySize) - var cnt int - valuesForKeyPath(&ivals, &cnt, m, keys, subKeyMap) - return ivals[:cnt], nil -} - -func valuesForKeyPath(ret *[]interface{}, cnt *int, m interface{}, keys []string, subkeys map[string]interface{}) { - lenKeys := len(keys) - - // load 'm' values into 'ret' - // expand any lists - if lenKeys == 0 { - switch m.(type) { - case map[string]interface{}: - if subkeys != nil { - if ok := hasSubKeys(m, subkeys); !ok { - return - } - } - *ret = append(*ret, m) - *cnt++ - case []interface{}: - for i, v := range m.([]interface{}) { - if subkeys != nil { - if ok := hasSubKeys(v, subkeys); !ok { - continue // only load list members with subkeys - } - } - *ret = append(*ret, (m.([]interface{}))[i]) - *cnt++ - } - default: - if subkeys != nil { - return // must be map[string]interface{} if there are subkeys - } - *ret = append(*ret, m) - *cnt++ - } - return - } - - // key of interest - key := keys[0] - switch key { - case "*": // wildcard - scan all values - switch m.(type) { - case map[string]interface{}: - for _, v := range m.(map[string]interface{}) { - // valuesForKeyPath(ret, v, keys[1:], subkeys) - valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) - } - case []interface{}: - for _, v := range m.([]interface{}) { - switch v.(type) { - // flatten out a list of maps - keys are processed - case map[string]interface{}: - for _, vv := range v.(map[string]interface{}) { - // valuesForKeyPath(ret, vv, keys[1:], subkeys) - valuesForKeyPath(ret, cnt, vv, keys[1:], subkeys) - } - default: - // valuesForKeyPath(ret, v, keys[1:], subkeys) - valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) - } - } - } - default: // key - must be map[string]interface{} - switch m.(type) { - case map[string]interface{}: - if v, ok := m.(map[string]interface{})[key]; ok { - // valuesForKeyPath(ret, v, keys[1:], subkeys) - valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) - } - 
case []interface{}: // may be buried in list - for _, v := range m.([]interface{}) { - switch v.(type) { - case map[string]interface{}: - if vv, ok := v.(map[string]interface{})[key]; ok { - // valuesForKeyPath(ret, vv, keys[1:], subkeys) - valuesForKeyPath(ret, cnt, vv, keys[1:], subkeys) - } - } - } - } - } -} - -// hasSubKeys() - interface{} equality works for string, float64, bool -// 'v' must be a map[string]interface{} value to have subkeys -// 'a' can have k:v pairs with v.(string) == "*", which is treated like a wildcard. -func hasSubKeys(v interface{}, subkeys map[string]interface{}) bool { - if len(subkeys) == 0 { - return true - } - - switch v.(type) { - case map[string]interface{}: - // do all subKey name:value pairs match? - mv := v.(map[string]interface{}) - for skey, sval := range subkeys { - isNotKey := false - if skey[:1] == "!" { // a NOT-key - skey = skey[1:] - isNotKey = true - } - vv, ok := mv[skey] - if !ok { // key doesn't exist - if isNotKey { // key not there, but that's what we want - if kv, ok := sval.(string); ok && kv == "*" { - continue - } - } - return false - } - // wildcard check - if kv, ok := sval.(string); ok && kv == "*" { - if isNotKey { // key is there, and we don't want it - return false - } - continue - } - switch sval.(type) { - case string: - if s, ok := vv.(string); ok && s == sval.(string) { - if isNotKey { - return false - } - continue - } - case bool: - if b, ok := vv.(bool); ok && b == sval.(bool) { - if isNotKey { - return false - } - continue - } - case float64: - if f, ok := vv.(float64); ok && f == sval.(float64) { - if isNotKey { - return false - } - continue - } - } - // key there but didn't match subkey value - if isNotKey { // that's what we want - continue - } - return false - } - // all subkeys matched - return true - } - - // not a map[string]interface{} value, can't have subkeys - return false -} - -// Generate map of key:value entries as map[string]string. -// 'kv' arguments are "name:value" pairs: attribute keys are designated with prepended hyphen, '-'. -// If len(kv) == 0, the return is (nil, nil). -func getSubKeyMap(kv ...string) (map[string]interface{}, error) { - if len(kv) == 0 { - return nil, nil - } - m := make(map[string]interface{}, 0) - for _, v := range kv { - vv := strings.Split(v, fieldSep) - switch len(vv) { - case 2: - m[vv[0]] = interface{}(vv[1]) - case 3: - switch vv[2] { - case "string", "char", "text": - m[vv[0]] = interface{}(vv[1]) - case "bool", "boolean": - // ParseBool treats "1"==true & "0"==false - b, err := strconv.ParseBool(vv[1]) - if err != nil { - return nil, fmt.Errorf("can't convert subkey value to bool: %s", vv[1]) - } - m[vv[0]] = interface{}(b) - case "float", "float64", "num", "number", "numeric": - f, err := strconv.ParseFloat(vv[1], 64) - if err != nil { - return nil, fmt.Errorf("can't convert subkey value to float: %s", vv[1]) - } - m[vv[0]] = interface{}(f) - default: - return nil, fmt.Errorf("unknown subkey conversion spec: %s", v) - } - default: - return nil, fmt.Errorf("unknown subkey spec: %s", v) - } - } - return m, nil -} - -// ------------------------------- END of valuesFor ... ---------------------------- - -// ----------------------- locate where a key value is in the tree ------------------- - -//----------------------------- find all paths to a key -------------------------------- - -// Get all paths through Map, 'mv', (in dot-notation) that terminate with the specified key. -// Results can be used with ValuesForPath. 
-func (mv Map) PathsForKey(key string) []string { - m := map[string]interface{}(mv) - breadbasket := make(map[string]bool, 0) - breadcrumbs := "" - - hasKeyPath(breadcrumbs, m, key, breadbasket) - if len(breadbasket) == 0 { - return nil - } - - // unpack map keys to return - res := make([]string, len(breadbasket)) - var i int - for k := range breadbasket { - res[i] = k - i++ - } - - return res -} - -// Extract the shortest path from all possible paths - from PathsForKey() - in Map, 'mv'.. -// Paths are strings using dot-notation. -func (mv Map) PathForKeyShortest(key string) string { - paths := mv.PathsForKey(key) - - lp := len(paths) - if lp == 0 { - return "" - } - if lp == 1 { - return paths[0] - } - - shortest := paths[0] - shortestLen := len(strings.Split(shortest, ".")) - - for i := 1; i < len(paths); i++ { - vlen := len(strings.Split(paths[i], ".")) - if vlen < shortestLen { - shortest = paths[i] - shortestLen = vlen - } - } - - return shortest -} - -// hasKeyPath - if the map 'key' exists append it to KeyPath.path and increment KeyPath.depth -// This is really just a breadcrumber that saves all trails that hit the prescribed 'key'. -func hasKeyPath(crumbs string, iv interface{}, key string, basket map[string]bool) { - switch iv.(type) { - case map[string]interface{}: - vv := iv.(map[string]interface{}) - if _, ok := vv[key]; ok { - // create a new breadcrumb, intialized with the one we have - var nbc string - if crumbs == "" { - nbc = key - } else { - nbc = crumbs + "." + key - } - basket[nbc] = true - } - // walk on down the path, key could occur again at deeper node - for k, v := range vv { - // create a new breadcrumb, intialized with the one we have - var nbc string - if crumbs == "" { - nbc = k - } else { - nbc = crumbs + "." + k - } - hasKeyPath(nbc, v, key, basket) - } - case []interface{}: - // crumb-trail doesn't change, pass it on - for _, v := range iv.([]interface{}) { - hasKeyPath(crumbs, v, key, basket) - } - } -} - -var PathNotExistError = errors.New("Path does not exist") - -// ValueForPath wrap ValuesFor Path and returns the first value returned. -// If no value is found it returns 'nil' and PathNotExistError. -func (mv Map) ValueForPath(path string) (interface{}, error) { - vals, err := mv.ValuesForPath(path) - if err != nil { - return nil, err - } - if len(vals) == 0 { - return nil, PathNotExistError - } - return vals[0], nil -} - -// Returns the first found value for the path as a string. -func (mv Map) ValueForPathString(path string) (string, error) { - vals, err := mv.ValuesForPath(path) - if err != nil { - return "", err - } - if len(vals) == 0 { - return "", errors.New("ValueForPath: path not found") - } - val := vals[0] - switch str := val.(type) { - case string: - return str, nil - default: - return "", fmt.Errorf("ValueForPath: unsupported type: %T", str) - } -} - -// Returns the first found value for the path as a string. -// If the path is not found then it returns an empty string. 
-func (mv Map) ValueOrEmptyForPathString(path string) string { - str, _ := mv.ValueForPathString(path) - return str -} diff --git a/vendor/github.com/clbanning/mxj/leafnode.go b/vendor/github.com/clbanning/mxj/leafnode.go deleted file mode 100644 index cf413ebdd4f..00000000000 --- a/vendor/github.com/clbanning/mxj/leafnode.go +++ /dev/null @@ -1,112 +0,0 @@ -package mxj - -// leafnode.go - return leaf nodes with paths and values for the Map -// inspired by: https://groups.google.com/forum/#!topic/golang-nuts/3JhuVKRuBbw - -import ( - "strconv" - "strings" -) - -const ( - NoAttributes = true // suppress LeafNode values that are attributes -) - -// LeafNode - a terminal path value in a Map. -// For XML Map values it represents an attribute or simple element value - of type -// string unless Map was created using Cast flag. For JSON Map values it represents -// a string, numeric, boolean, or null value. -type LeafNode struct { - Path string // a dot-notation representation of the path with array subscripting - Value interface{} // the value at the path termination -} - -// LeafNodes - returns an array of all LeafNode values for the Map. -// The option no_attr argument suppresses attribute values (keys with prepended hyphen, '-') -// as well as the "#text" key for the associated simple element value. -// -// PrependAttrWithHypen(false) will result in attributes having .attr-name as -// terminal node in 'path' while the path for the element value, itself, will be -// the base path w/o "#text". -// -// LeafUseDotNotation(true) causes list members to be identified using ".N" syntax -// rather than "[N]" syntax. -func (mv Map) LeafNodes(no_attr ...bool) []LeafNode { - var a bool - if len(no_attr) == 1 { - a = no_attr[0] - } - - l := make([]LeafNode, 0) - getLeafNodes("", "", map[string]interface{}(mv), &l, a) - return l -} - -func getLeafNodes(path, node string, mv interface{}, l *[]LeafNode, noattr bool) { - // if stripping attributes, then also strip "#text" key - if !noattr || node != "#text" { - if path != "" && node[:1] != "[" { - path += "." - } - path += node - } - switch mv.(type) { - case map[string]interface{}: - for k, v := range mv.(map[string]interface{}) { - // if noattr && k[:1] == "-" { - if noattr && len(attrPrefix) > 0 && strings.Index(k, attrPrefix) == 0 { - continue - } - getLeafNodes(path, k, v, l, noattr) - } - case []interface{}: - for i, v := range mv.([]interface{}) { - if useDotNotation { - getLeafNodes(path, strconv.Itoa(i), v, l, noattr) - } else { - getLeafNodes(path, "["+strconv.Itoa(i)+"]", v, l, noattr) - } - } - default: - // can't walk any further, so create leaf - n := LeafNode{path, mv} - *l = append(*l, n) - } -} - -// LeafPaths - all paths that terminate in LeafNode values. -func (mv Map) LeafPaths(no_attr ...bool) []string { - ln := mv.LeafNodes() - ss := make([]string, len(ln)) - for i := 0; i < len(ln); i++ { - ss[i] = ln[i].Path - } - return ss -} - -// LeafValues - all terminal values in the Map. -func (mv Map) LeafValues(no_attr ...bool) []interface{} { - ln := mv.LeafNodes() - vv := make([]interface{}, len(ln)) - for i := 0; i < len(ln); i++ { - vv[i] = ln[i].Value - } - return vv -} - -// ====================== utilities ====================== - -// https://groups.google.com/forum/#!topic/golang-nuts/pj0C5IrZk4I -var useDotNotation bool - -// LeafUseDotNotation sets a flag that list members in LeafNode paths -// should be identified using ".N" syntax rather than the default "[N]" -// syntax. 
Calling LeafUseDotNotation with no arguments toggles the -// flag on/off; otherwise, the argument sets the flag value 'true'/'false'. -func LeafUseDotNotation(b ...bool) { - if len(b) == 0 { - useDotNotation = !useDotNotation - return - } - useDotNotation = b[0] -} diff --git a/vendor/github.com/clbanning/mxj/misc.go b/vendor/github.com/clbanning/mxj/misc.go deleted file mode 100644 index 5b4fab2165d..00000000000 --- a/vendor/github.com/clbanning/mxj/misc.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2016 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// misc.go - mimic functions (+others) called out in: -// https://groups.google.com/forum/#!topic/golang-nuts/jm_aGsJNbdQ -// Primarily these methods let you retrive XML structure information. - -package mxj - -import ( - "fmt" - "sort" - "strings" -) - -// Return the root element of the Map. If there is not a single key in Map, -// then an error is returned. -func (mv Map) Root() (string, error) { - mm := map[string]interface{}(mv) - if len(mm) != 1 { - return "", fmt.Errorf("Map does not have singleton root. Len: %d.", len(mm)) - } - for k, _ := range mm { - return k, nil - } - return "", nil -} - -// If the path is an element with sub-elements, return a list of the sub-element -// keys. (The list is alphabeticly sorted.) NOTE: Map keys that are prefixed with -// '-', a hyphen, are considered attributes; see m.Attributes(path). -func (mv Map) Elements(path string) ([]string, error) { - e, err := mv.ValueForPath(path) - if err != nil { - return nil, err - } - switch e.(type) { - case map[string]interface{}: - ee := e.(map[string]interface{}) - elems := make([]string, len(ee)) - var i int - for k, _ := range ee { - if len(attrPrefix) > 0 && strings.Index(k, attrPrefix) == 0 { - continue // skip attributes - } - elems[i] = k - i++ - } - elems = elems[:i] - // alphabetic sort keeps things tidy - sort.Strings(elems) - return elems, nil - } - return nil, fmt.Errorf("no elements for path: %s", path) -} - -// If the path is an element with attributes, return a list of the attribute -// keys. (The list is alphabeticly sorted.) NOTE: Map keys that are not prefixed with -// '-', a hyphen, are not treated as attributes; see m.Elements(path). Also, if the -// attribute prefix is "" - SetAttrPrefix("") or PrependAttrWithHyphen(false) - then -// there are no identifiable attributes. -func (mv Map) Attributes(path string) ([]string, error) { - a, err := mv.ValueForPath(path) - if err != nil { - return nil, err - } - switch a.(type) { - case map[string]interface{}: - aa := a.(map[string]interface{}) - attrs := make([]string, len(aa)) - var i int - for k, _ := range aa { - if len(attrPrefix) == 0 || strings.Index(k, attrPrefix) != 0 { - continue // skip non-attributes - } - attrs[i] = k[len(attrPrefix):] - i++ - } - attrs = attrs[:i] - // alphabetic sort keeps things tidy - sort.Strings(attrs) - return attrs, nil - } - return nil, fmt.Errorf("no attributes for path: %s", path) -} diff --git a/vendor/github.com/clbanning/mxj/mxj.go b/vendor/github.com/clbanning/mxj/mxj.go deleted file mode 100644 index f0592f06c8e..00000000000 --- a/vendor/github.com/clbanning/mxj/mxj.go +++ /dev/null @@ -1,128 +0,0 @@ -// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. -// Copyright 2012-2014 Charles Banning. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -package mxj - -import ( - "fmt" - "sort" -) - -const ( - Cast = true // for clarity - e.g., mxj.NewMapXml(doc, mxj.Cast) - SafeEncoding = true // ditto - e.g., mv.Json(mxj.SafeEncoding) -) - -type Map map[string]interface{} - -// Allocate a Map. -func New() Map { - m := make(map[string]interface{}, 0) - return m -} - -// Cast a Map to map[string]interface{} -func (mv Map) Old() map[string]interface{} { - return mv -} - -// Return a copy of mv as a newly allocated Map. If the Map only contains string, -// numeric, map[string]interface{}, and []interface{} values, then it can be thought -// of as a "deep copy." Copying a structure (or structure reference) value is subject -// to the noted restrictions. -// NOTE: If 'mv' includes structure values with, possibly, JSON encoding tags -// then only public fields of the structure are in the new Map - and with -// keys that conform to any encoding tag instructions. The structure itself will -// be represented as a map[string]interface{} value. -func (mv Map) Copy() (Map, error) { - // this is the poor-man's deep copy - // not efficient, but it works - j, jerr := mv.Json() - // must handle, we don't know how mv got built - if jerr != nil { - return nil, jerr - } - return NewMapJson(j) -} - -// --------------- StringIndent ... from x2j.WriteMap ------------- - -// Pretty print a Map. -func (mv Map) StringIndent(offset ...int) string { - return writeMap(map[string]interface{}(mv), true, true, offset...) -} - -// Pretty print a Map without the value type information - just key:value entries. -func (mv Map) StringIndentNoTypeInfo(offset ...int) string { - return writeMap(map[string]interface{}(mv), false, true, offset...) -} - -// writeMap - dumps the map[string]interface{} for examination. -// 'typeInfo' causes value type to be printed. -// 'offset' is initial indentation count; typically: Write(m). -func writeMap(m interface{}, typeInfo, root bool, offset ...int) string { - var indent int - if len(offset) == 1 { - indent = offset[0] - } - - var s string - switch m.(type) { - case []interface{}: - if typeInfo { - s += "[[]interface{}]" - } - for _, v := range m.([]interface{}) { - s += "\n" - for i := 0; i < indent; i++ { - s += " " - } - s += writeMap(v, typeInfo, false, indent+1) - } - case map[string]interface{}: - list := make([][2]string, len(m.(map[string]interface{}))) - var n int - for k, v := range m.(map[string]interface{}) { - list[n][0] = k - list[n][1] = writeMap(v, typeInfo, false, indent+1) - n++ - } - sort.Sort(mapList(list)) - for _, v := range list { - if root { - root = false - } else { - s += "\n" - } - for i := 0; i < indent; i++ { - s += " " - } - s += v[0] + " : " + v[1] - } - default: - if typeInfo { - s += fmt.Sprintf("[%T] %+v", m, m) - } else { - s += fmt.Sprintf("%+v", m) - } - } - return s -} - -// ======================== utility =============== - -type mapList [][2]string - -func (ml mapList) Len() int { - return len(ml) -} - -func (ml mapList) Swap(i, j int) { - ml[i], ml[j] = ml[j], ml[i] -} - -func (ml mapList) Less(i, j int) bool { - return ml[i][0] <= ml[j][0] -} diff --git a/vendor/github.com/clbanning/mxj/newmap.go b/vendor/github.com/clbanning/mxj/newmap.go deleted file mode 100644 index b293949056d..00000000000 --- a/vendor/github.com/clbanning/mxj/newmap.go +++ /dev/null @@ -1,184 +0,0 @@ -// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. 
-// Copyright 2012-2014, 2018 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// remap.go - build a new Map from the current Map based on keyOld:keyNew mapppings -// keys can use dot-notation, keyOld can use wildcard, '*' -// -// Computational strategy - -// Using the key path - []string - traverse a new map[string]interface{} and -// insert the oldVal as the newVal when we arrive at the end of the path. -// If the type at the end is nil, then that is newVal -// If the type at the end is a singleton (string, float64, bool) an array is created. -// If the type at the end is an array, newVal is just appended. -// If the type at the end is a map, it is inserted if possible or the map value -// is converted into an array if necessary. - -package mxj - -import ( - "errors" - "strings" -) - -// (Map)NewMap - create a new Map from data in the current Map. -// 'keypairs' are key mappings "oldKey:newKey" and specify that the current value of 'oldKey' -// should be the value for 'newKey' in the returned Map. -// - 'oldKey' supports dot-notation as described for (Map)ValuesForPath() -// - 'newKey' supports dot-notation but with no wildcards, '*', or indexed arrays -// - "oldKey" is shorthand for the keypair value "oldKey:oldKey" -// - "oldKey:" and ":newKey" are invalid keypair values -// - if 'oldKey' does not exist in the current Map, it is not written to the new Map. -// "null" is not supported unless it is the current Map. -// - see newmap_test.go for several syntax examples -// - mv.NewMap() == mxj.New() -// -// NOTE: "examples/partial.go" shows how to create arbitrary sub-docs of an XML doc. -func (mv Map) NewMap(keypairs ...string) (Map, error) { - n := make(map[string]interface{}, 0) - if len(keypairs) == 0 { - return n, nil - } - - // loop through the pairs - var oldKey, newKey string - var path []string - for _, v := range keypairs { - if len(v) == 0 { - continue // just skip over empty keypair arguments - } - - // initialize oldKey, newKey and check - vv := strings.Split(v, ":") - if len(vv) > 2 { - return n, errors.New("oldKey:newKey keypair value not valid - " + v) - } - if len(vv) == 1 { - oldKey, newKey = vv[0], vv[0] - } else { - oldKey, newKey = vv[0], vv[1] - } - strings.TrimSpace(oldKey) - strings.TrimSpace(newKey) - if i := strings.Index(newKey, "*"); i > -1 { - return n, errors.New("newKey value cannot contain wildcard character - " + v) - } - if i := strings.Index(newKey, "["); i > -1 { - return n, errors.New("newKey value cannot contain indexed arrays - " + v) - } - if oldKey == "" || newKey == "" { - return n, errors.New("oldKey or newKey is not specified - " + v) - } - - // get oldKey value - oldVal, err := mv.ValuesForPath(oldKey) - if err != nil { - return n, err - } - if len(oldVal) == 0 { - continue // oldKey has no value, may not exist in mv - } - - // break down path - path = strings.Split(newKey, ".") - if path[len(path)-1] == "" { // ignore a trailing dot in newKey spec - path = path[:len(path)-1] - } - - addNewVal(&n, path, oldVal) - } - - return n, nil -} - -// navigate 'n' to end of path and add val -func addNewVal(n *map[string]interface{}, path []string, val []interface{}) { - // newVal - either singleton or array - var newVal interface{} - if len(val) == 1 { - newVal = val[0] // is type interface{} - } else { - newVal = interface{}(val) - } - - // walk to the position of interest, create it if necessary - m := (*n) // initialize map walker - var k string // key for 
m - lp := len(path) - 1 // when to stop looking - for i := 0; i < len(path); i++ { - k = path[i] - if i == lp { - break - } - var nm map[string]interface{} // holds position of next-map - switch m[k].(type) { - case nil: // need a map for next node in path, so go there - nm = make(map[string]interface{}, 0) - m[k] = interface{}(nm) - m = m[k].(map[string]interface{}) - case map[string]interface{}: - // OK - got somewhere to walk to, go there - m = m[k].(map[string]interface{}) - case []interface{}: - // add a map and nm points to new map unless there's already - // a map in the array, then nm points there - // The placement of the next value in the array is dependent - // on the sequence of members - could land on a map or a nil - // value first. TODO: how to test this. - a := make([]interface{}, 0) - var foundmap bool - for _, vv := range m[k].([]interface{}) { - switch vv.(type) { - case nil: // doesn't appear that this occurs, need a test case - if foundmap { // use the first one in array - a = append(a, vv) - continue - } - nm = make(map[string]interface{}, 0) - a = append(a, interface{}(nm)) - foundmap = true - case map[string]interface{}: - if foundmap { // use the first one in array - a = append(a, vv) - continue - } - nm = vv.(map[string]interface{}) - a = append(a, vv) - foundmap = true - default: - a = append(a, vv) - } - } - // no map found in array - if !foundmap { - nm = make(map[string]interface{}, 0) - a = append(a, interface{}(nm)) - } - m[k] = interface{}(a) // must insert in map - m = nm - default: // it's a string, float, bool, etc. - aa := make([]interface{}, 0) - nm = make(map[string]interface{}, 0) - aa = append(aa, m[k], nm) - m[k] = interface{}(aa) - m = nm - } - } - - // value is nil, array or a singleton of some kind - // initially m.(type) == map[string]interface{} - v := m[k] - switch v.(type) { - case nil: // initialized - m[k] = newVal - case []interface{}: - a := m[k].([]interface{}) - a = append(a, newVal) - m[k] = interface{}(a) - default: // v exists:string, float64, bool, map[string]interface, etc. - a := make([]interface{}, 0) - a = append(a, v, newVal) - m[k] = interface{}(a) - } -} diff --git a/vendor/github.com/clbanning/mxj/readme.md b/vendor/github.com/clbanning/mxj/readme.md deleted file mode 100644 index 6bb21dca82a..00000000000 --- a/vendor/github.com/clbanning/mxj/readme.md +++ /dev/null @@ -1,179 +0,0 @@ -

-mxj - to/from maps, XML and JSON
-
-Decode/encode XML to/from map[string]interface{} (or JSON) values, and extract/modify values from maps by key or key-path, including wildcards. - -mxj supplants the legacy x2j and j2x packages. If you want the old syntax, use mxj/x2j and mxj/j2x packages. - -

-Related Packages
-
- -https://github.com/clbanning/checkxml provides functions for validating XML data. - -

-Refactor Decoder - 2015.11.15
-
-For over a year I've wanted to refactor the XML-to-map[string]interface{} decoder to make it more performant. I recently took the time to do that, since we were using github.com/clbanning/mxj in a production system that could be deployed on a Raspberry Pi. Now the decoder is comparable to the stdlib JSON-to-map[string]interface{} decoder in terms of its additional processing overhead relative to decoding to a structure value. As shown by: - - BenchmarkNewMapXml-4 100000 18043 ns/op - BenchmarkNewStructXml-4 100000 14892 ns/op - BenchmarkNewMapJson-4 300000 4633 ns/op - BenchmarkNewStructJson-4 300000 3427 ns/op - BenchmarkNewMapXmlBooks-4 20000 82850 ns/op - BenchmarkNewStructXmlBooks-4 20000 67822 ns/op - BenchmarkNewMapJsonBooks-4 100000 17222 ns/op - BenchmarkNewStructJsonBooks-4 100000 15309 ns/op - -

-Notices
-
- - 2018.04.18: mv.Xml/mv.XmlIndent encodes non-map[string]interface{} map values - map[string]string, map[int]uint, etc. - 2018.03.29: mv.Gob/NewMapGob support gob encoding/decoding of Maps. - 2018.03.26: Added mxj/x2j-wrapper sub-package for migrating from legacy x2j package. - 2017.02.22: LeafNode paths can use ".N" syntax rather than "[N]" for list member indexing. - 2017.02.10: SetFieldSeparator changes field separator for args in UpdateValuesForPath, ValuesFor... methods. - 2017.02.06: Support XMPP stream processing - HandleXMPPStreamTag(). - 2016.11.07: Preserve name space prefix syntax in XmlSeq parser - NewMapXmlSeq(), etc. - 2016.06.25: Support overriding default XML attribute prefix, "-", in Map keys - SetAttrPrefix(). - 2016.05.26: Support customization of xml.Decoder by exposing CustomDecoder variable. - 2016.03.19: Escape invalid chars when encoding XML attribute and element values - XMLEscapeChars(). - 2016.03.02: By default decoding XML with float64 and bool value casting will not cast "NaN", "Inf", and "-Inf". - To cast them to float64, first set flag with CastNanInf(true). - 2016.02.22: New mv.Root(), mv.Elements(), mv.Attributes methods let you examine XML document structure. - 2016.02.16: Add CoerceKeysToLower() option to handle tags with mixed capitalization. - 2016.02.12: Seek for first xml.StartElement token; only return error if io.EOF is reached first (handles BOM). - 2015.12.02: XML decoding/encoding that preserves original structure of document. See NewMapXmlSeq() - and mv.XmlSeq() / mv.XmlSeqIndent(). - 2015-05-20: New: mv.StringIndentNoTypeInfo(). - Also, alphabetically sort map[string]interface{} values by key to prettify output for mv.Xml(), - mv.XmlIndent(), mv.StringIndent(), mv.StringIndentNoTypeInfo(). - 2014-11-09: IncludeTagSeqNum() adds "_seq" key with XML doc positional information. - (NOTE: PreserveXmlList() is similar and will be here soon.) - 2014-09-18: inspired by NYTimes fork, added PrependAttrWithHyphen() to allow stripping hyphen from attribute tag. - 2014-08-02: AnyXml() and AnyXmlIndent() will try to marshal arbitrary values to XML. - 2014-04-28: ValuesForPath() and NewMap() now accept path with indexed array references. - -

-Basic Unmarshal XML to map[string]interface{}
-
-type Map map[string]interface{}
-
-Create a `Map` value, 'mv', from any `map[string]interface{}` value, 'v':
-mv := Map(v)
-
-Unmarshal / marshal XML as a `Map` value, 'mv':
-mv, err := NewMapXml(xmlValue) // unmarshal
-xmlValue, err := mv.Xml()      // marshal
-
-Unmarshal XML from an `io.Reader` as a `Map` value, 'mv':
-mv, err := NewMapXmlReader(xmlReader)         // repeated calls, as with an os.File Reader, will process stream
-mv, raw, err := NewMapXmlReaderRaw(xmlReader) // 'raw' is the raw XML that was decoded
-
-Marshal `Map` value, 'mv', to an XML Writer (`io.Writer`):
-err := mv.XmlWriter(xmlWriter)
-raw, err := mv.XmlWriterRaw(xmlWriter) // 'raw' is the raw XML that was written on xmlWriter
-
-Also, for prettified output:
-xmlValue, err := mv.XmlIndent(prefix, indent, ...)
-err := mv.XmlIndentWriter(xmlWriter, prefix, indent, ...)
-raw, err := mv.XmlIndentWriterRaw(xmlWriter, prefix, indent, ...)
-
-Bulk process XML with error handling (note: handlers must return a boolean value):
-err := HandleXmlReader(xmlReader, mapHandler(Map), errHandler(error))
-err := HandleXmlReaderRaw(xmlReader, mapHandler(Map, []byte), errHandler(error, []byte))
-
-Converting XML to JSON: see Examples for `NewMapXml` and `HandleXmlReader`.
-
-There are comparable functions and methods for JSON processing.
-
-Arbitrary structure values can be decoded to / encoded from `Map` values:
-mv, err := NewMapStruct(structVal)
-err := mv.Struct(structPointer)
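A minimal sketch of the XML-to-JSON round trip described above, assuming the mxj package as vendored here; the sample document, tag names, and values are invented for illustration:

    package main

    import (
        "fmt"

        "github.com/clbanning/mxj"
    )

    func main() {
        // Illustrative document (not from this repo): decode XML into a Map, then re-encode as JSON.
        mv, err := mxj.NewMapXml([]byte(`<doc><name>gateway</name><port>8080</port></doc>`))
        if err != nil {
            panic(err)
        }
        jsonVal, err := mv.Json()
        if err != nil {
            panic(err)
        }
        // Without the optional cast flag every simple value stays a string:
        // {"doc":{"name":"gateway","port":"8080"}}
        fmt.Println(string(jsonVal))
    }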
- -

-Extract / modify Map values
-
-To work with XML tag values, JSON or Map key values or structure field values, decode the XML, JSON
-or structure to a `Map` value, 'mv', or cast a `map[string]interface{}` value to a `Map` value, 'mv', then:
-paths := mv.PathsForKey(key)
-path := mv.PathForKeyShortest(key)
-values, err := mv.ValuesForKey(key, subkeys)
-values, err := mv.ValuesForPath(path, subkeys)
-count, err := mv.UpdateValuesForPath(newVal, path, subkeys)
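A rough, self-contained sketch of the dot-notation path and subkey syntax used by these calls; the book document below is made up for the example, and attribute keys carry the default '-' prefix:

    package main

    import (
        "fmt"

        "github.com/clbanning/mxj"
    )

    func main() {
        // Invented document used only to illustrate path and subkey syntax.
        doc := []byte(`<doc><book seq="1"><title>A</title></book><book seq="2"><title>B</title></book></doc>`)
        mv, err := mxj.NewMapXml(doc)
        if err != nil {
            panic(err)
        }

        // Every title along the dot-notation path, across both list members.
        titles, _ := mv.ValuesForPath("doc.book.title")
        fmt.Println(titles) // [A B]

        // Subkeys narrow the match: only the book whose seq attribute is "2".
        second, _ := mv.ValuesForPath("doc.book", "-seq:2")
        fmt.Println(len(second)) // 1
    }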
-
-Get everything at once, irrespective of path depth:
-leafnodes := mv.LeafNodes()
-leafvalues := mv.LeafValues()
- -A new `Map` with whatever keys are desired can be created from the current `Map` and then encoded in XML -or JSON. (Note: keys can use dot-notation.) -
newMap, err := mv.NewMap("oldKey_1:newKey_1", "oldKey_2:newKey_2", ..., "oldKey_N:newKey_N")
-newMap, err := mv.NewMap("oldKey1", "oldKey3", "oldKey5") // a subset of 'mv'; see "examples/partial.go"
-newXml, err := newMap.Xml()   // for example
-newJson, err := newMap.Json() // ditto
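A small sketch of the keypair remapping, assuming an in-memory Map; the key names are invented for the example:

    package main

    import (
        "fmt"

        "github.com/clbanning/mxj"
    )

    func main() {
        // Invented keys; shows how "oldKey:newKey" keypairs reshape a Map.
        mv := mxj.Map(map[string]interface{}{
            "config": map[string]interface{}{
                "listen_port": 8080.0,
                "secret":      "not copied",
            },
        })

        // oldKey uses dot-notation; the value is rehomed under server.port.
        newMap, err := mv.NewMap("config.listen_port:server.port")
        if err != nil {
            panic(err)
        }
        newJson, _ := newMap.Json()
        fmt.Println(string(newJson)) // {"server":{"port":8080}}
    }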
- -

-Usage
-
- -The package is fairly well [self-documented with examples](http://godoc.org/github.com/clbanning/mxj). - -Also, the subdirectory "examples" contains a wide range of examples, several taken from golang-nuts discussions. - -

-XML parsing conventions
-
- -Using NewMapXml() - - - Attributes are parsed to `map[string]interface{}` values by prefixing a hyphen, `-`, - to the attribute label. (Unless overridden by `PrependAttrWithHyphen(false)` or - `SetAttrPrefix()`.) - - If the element is a simple element and has attributes, the element value - is given the key `#text` for its `map[string]interface{}` representation. (See - the 'atomFeedString.xml' test data, below.) - - XML comments, directives, and process instructions are ignored. - - If CoerceKeysToLower() has been called, then the resultant keys will be lower case. - -Using NewMapXmlSeq() - - - Attributes are parsed to `map["#attr"]map[]map[string]interface{}`values - where the `` value has "#text" and "#seq" keys - the "#text" key holds the - value for ``. - - All elements, except for the root, have a "#seq" key. - - Comments, directives, and process instructions are unmarshalled into the Map using the - keys "#comment", "#directive", and "#procinst", respectively. (See documentation for more - specifics.) - - Name space syntax is preserved: - - `something` parses to `map["ns:key"]interface{}{"something"}` - - `xmlns:ns="http://myns.com/ns"` parses to `map["xmlns:ns"]interface{}{"http://myns.com/ns"}` - -Both - - - By default, "Nan", "Inf", and "-Inf" values are not cast to float64. If you want them - to be cast, set a flag to cast them using CastNanInf(true). - -
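A short sketch of the first two conventions in practice (hyphen-prefixed attribute key, and "#text" for a simple element value when an attribute is present); the XML fragment is invented for illustration:

    package main

    import (
        "fmt"

        "github.com/clbanning/mxj"
    )

    func main() {
        // Invented fragment: an attribute plus a simple element value.
        mv, err := mxj.NewMapXml([]byte(`<msg id="42">hello</msg>`))
        if err != nil {
            panic(err)
        }
        // The attribute is keyed "-id"; the element value moves under "#text".
        id, _ := mv.ValueForPath("msg.-id")
        text, _ := mv.ValueForPath("msg.#text")
        fmt.Println(id, text) // 42 hello
    }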

-XML encoding conventions
-
- - - 'nil' `Map` values, which may represent 'null' JSON values, are encoded as ``. - NOTE: the operation is not symmetric as `` elements are decoded as `tag:""` `Map` values, - which, then, encode in JSON as `"tag":""` values. - - ALSO: there is no guarantee that the encoded XML doc will be the same as the decoded one. (Go - randomizes the walk through map[string]interface{} values.) If you plan to re-encode the - Map value to XML and want the same sequencing of elements look at NewMapXmlSeq() and - mv.XmlSeq() - these try to preserve the element sequencing but with added complexity when - working with the Map representation. - -

-Running "go test"
-
- -Because there are no guarantees on the sequence map elements are retrieved, the tests have been -written for visual verification in most cases. One advantage is that you can easily use the -output from running "go test" as examples of calling the various functions and methods. - -

-Motivation
-
- -I make extensive use of JSON for messaging and typically unmarshal the messages into -`map[string]interface{}` values. This is easily done using `json.Unmarshal` from the -standard Go libraries. Unfortunately, many legacy solutions use structured -XML messages; in those environments the applications would have to be refactored to -interoperate with my components. - -The better solution is to just provide an alternative HTTP handler that receives -XML messages and parses it into a `map[string]interface{}` value and then reuse -all the JSON-based code. The Go `xml.Unmarshal()` function does not provide the same -option of unmarshaling XML messages into `map[string]interface{}` values. So I wrote -a couple of small functions to fill this gap and released them as the x2j package. - -Over the next year and a half additional features were added, and the companion j2x -package was released to address XML encoding of arbitrary JSON and `map[string]interface{}` -values. As part of a refactoring of our production system and looking at how we had been -using the x2j and j2x packages we found that we rarely performed direct XML-to-JSON or -JSON-to_XML conversion and that working with the XML or JSON as `map[string]interface{}` -values was the primary value. Thus, everything was refactored into the mxj package. - diff --git a/vendor/github.com/clbanning/mxj/remove.go b/vendor/github.com/clbanning/mxj/remove.go deleted file mode 100644 index 8362ab17fa4..00000000000 --- a/vendor/github.com/clbanning/mxj/remove.go +++ /dev/null @@ -1,37 +0,0 @@ -package mxj - -import "strings" - -// Removes the path. -func (mv Map) Remove(path string) error { - m := map[string]interface{}(mv) - return remove(m, path) -} - -func remove(m interface{}, path string) error { - val, err := prevValueByPath(m, path) - if err != nil { - return err - } - - lastKey := lastKey(path) - delete(val, lastKey) - - return nil -} - -// returns the last key of the path. -// lastKey("a.b.c") would had returned "c" -func lastKey(path string) string { - keys := strings.Split(path, ".") - key := keys[len(keys)-1] - return key -} - -// returns the path without the last key -// parentPath("a.b.c") whould had returned "a.b" -func parentPath(path string) string { - keys := strings.Split(path, ".") - parentPath := strings.Join(keys[0:len(keys)-1], ".") - return parentPath -} diff --git a/vendor/github.com/clbanning/mxj/rename.go b/vendor/github.com/clbanning/mxj/rename.go deleted file mode 100644 index e95a9639af7..00000000000 --- a/vendor/github.com/clbanning/mxj/rename.go +++ /dev/null @@ -1,54 +0,0 @@ -package mxj - -import ( - "errors" - "strings" -) - -// RenameKey renames a key in a Map. -// It works only for nested maps. It doesn't work for cases when it buried in a list. -func (mv Map) RenameKey(path string, newName string) error { - if !mv.Exists(path) { - return errors.New("RenameKey: path not found: " + path) - } - if mv.Exists(parentPath(path) + "." 
+ newName) { - return errors.New("RenameKey: key already exists: " + newName) - } - - m := map[string]interface{}(mv) - return renameKey(m, path, newName) -} - -func renameKey(m interface{}, path string, newName string) error { - val, err := prevValueByPath(m, path) - if err != nil { - return err - } - - oldName := lastKey(path) - val[newName] = val[oldName] - delete(val, oldName) - - return nil -} - -// returns a value which contains a last key in the path -// For example: prevValueByPath("a.b.c", {a{b{c: 3}}}) returns {c: 3} -func prevValueByPath(m interface{}, path string) (map[string]interface{}, error) { - keys := strings.Split(path, ".") - - switch mValue := m.(type) { - case map[string]interface{}: - for key, value := range mValue { - if key == keys[0] { - if len(keys) == 1 { - return mValue, nil - } else { - // keep looking for the full path to the key - return prevValueByPath(value, strings.Join(keys[1:], ".")) - } - } - } - } - return nil, errors.New("prevValueByPath: didn't find path – " + path) -} diff --git a/vendor/github.com/clbanning/mxj/set.go b/vendor/github.com/clbanning/mxj/set.go deleted file mode 100644 index a297fc38887..00000000000 --- a/vendor/github.com/clbanning/mxj/set.go +++ /dev/null @@ -1,26 +0,0 @@ -package mxj - -import ( - "strings" -) - -// Sets the value for the path -func (mv Map) SetValueForPath(value interface{}, path string) error { - pathAry := strings.Split(path, ".") - parentPathAry := pathAry[0 : len(pathAry)-1] - parentPath := strings.Join(parentPathAry, ".") - - val, err := mv.ValueForPath(parentPath) - if err != nil { - return err - } - if val == nil { - return nil // we just ignore the request if there's no val - } - - key := pathAry[len(pathAry)-1] - cVal := val.(map[string]interface{}) - cVal[key] = value - - return nil -} diff --git a/vendor/github.com/clbanning/mxj/setfieldsep.go b/vendor/github.com/clbanning/mxj/setfieldsep.go deleted file mode 100644 index b70715ebc65..00000000000 --- a/vendor/github.com/clbanning/mxj/setfieldsep.go +++ /dev/null @@ -1,20 +0,0 @@ -package mxj - -// Per: https://github.com/clbanning/mxj/issues/37#issuecomment-278651862 -var fieldSep string = ":" - -// SetFieldSeparator changes the default field separator, ":", for the -// newVal argument in mv.UpdateValuesForPath and the optional 'subkey' arguments -// in mv.ValuesForKey and mv.ValuesForPath. -// -// E.g., if the newVal value is "http://blah/blah", setting the field separator -// to "|" will allow the newVal specification, "|http://blah/blah" to parse -// properly. If called with no argument or an empty string value, the field -// separator is set to the default, ":". -func SetFieldSeparator(s ...string) { - if len(s) == 0 || s[0] == "" { - fieldSep = ":" // the default - return - } - fieldSep = s[0] -} diff --git a/vendor/github.com/clbanning/mxj/songtext.xml b/vendor/github.com/clbanning/mxj/songtext.xml deleted file mode 100644 index 8c0f2becb12..00000000000 --- a/vendor/github.com/clbanning/mxj/songtext.xml +++ /dev/null @@ -1,29 +0,0 @@ - - help me! 
- - - - Henry was a renegade - Didn't like to play it safe - One component at a time - There's got to be a better way - Oh, people came from miles around - Searching for a steady job - Welcome to the Motor Town - Booming like an atom bomb - - - Oh, Henry was the end of the story - Then everything went wrong - And we'll return it to its former glory - But it just takes so long - - - - It's going to take a long time - It's going to take it, but we'll make it one day - It's going to take a long time - It's going to take it, but we'll make it one day - - - diff --git a/vendor/github.com/clbanning/mxj/strict.go b/vendor/github.com/clbanning/mxj/strict.go deleted file mode 100644 index 1e769560ba0..00000000000 --- a/vendor/github.com/clbanning/mxj/strict.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2016 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// strict.go actually addresses setting xml.Decoder attribute -// values. This'll let you parse non-standard XML. - -package mxj - -import ( - "encoding/xml" -) - -// CustomDecoder can be used to specify xml.Decoder attribute -// values, e.g., Strict:false, to be used. By default CustomDecoder -// is nil. If CustomeDecoder != nil, then mxj.XmlCharsetReader variable is -// ignored and must be set as part of the CustomDecoder value, if needed. -// Usage: -// mxj.CustomDecoder = &xml.Decoder{Strict:false} -var CustomDecoder *xml.Decoder - -// useCustomDecoder copy over public attributes from customDecoder -func useCustomDecoder(d *xml.Decoder) { - d.Strict = CustomDecoder.Strict - d.AutoClose = CustomDecoder.AutoClose - d.Entity = CustomDecoder.Entity - d.CharsetReader = CustomDecoder.CharsetReader - d.DefaultSpace = CustomDecoder.DefaultSpace -} - diff --git a/vendor/github.com/clbanning/mxj/struct.go b/vendor/github.com/clbanning/mxj/struct.go deleted file mode 100644 index 9be636cdcab..00000000000 --- a/vendor/github.com/clbanning/mxj/struct.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2012-2017 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -package mxj - -import ( - "encoding/json" - "errors" - "reflect" - - // "github.com/fatih/structs" -) - -// Create a new Map value from a structure. Error returned if argument is not a structure. -// Only public structure fields are decoded in the Map value. See github.com/fatih/structs#Map -// for handling of "structs" tags. - -// DEPRECATED - import github.com/fatih/structs and cast result of structs.Map to mxj.Map. -// import "github.com/fatih/structs" -// ... -// sm, err := structs.Map() -// if err != nil { -// // handle error -// } -// m := mxj.Map(sm) -// Alernatively uncomment the old source and import in struct.go. -func NewMapStruct(structVal interface{}) (Map, error) { - return nil, errors.New("deprecated - see package documentation") - /* - if !structs.IsStruct(structVal) { - return nil, errors.New("NewMapStruct() error: argument is not type Struct") - } - return structs.Map(structVal), nil - */ -} - -// Marshal a map[string]interface{} into a structure referenced by 'structPtr'. Error returned -// if argument is not a pointer or if json.Unmarshal returns an error. -// json.Unmarshal structure encoding rules are followed to encode public structure fields. -func (mv Map) Struct(structPtr interface{}) error { - // should check that we're getting a pointer. 
- if reflect.ValueOf(structPtr).Kind() != reflect.Ptr { - return errors.New("mv.Struct() error: argument is not type Ptr") - } - - m := map[string]interface{}(mv) - j, err := json.Marshal(m) - if err != nil { - return err - } - - return json.Unmarshal(j, structPtr) -} diff --git a/vendor/github.com/clbanning/mxj/updatevalues.go b/vendor/github.com/clbanning/mxj/updatevalues.go deleted file mode 100644 index 46779f4f063..00000000000 --- a/vendor/github.com/clbanning/mxj/updatevalues.go +++ /dev/null @@ -1,256 +0,0 @@ -// Copyright 2012-2014, 2017 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// updatevalues.go - modify a value based on path and possibly sub-keys -// TODO(clb): handle simple elements with attributes and NewMapXmlSeq Map values. - -package mxj - -import ( - "fmt" - "strconv" - "strings" -) - -// Update value based on path and possible sub-key values. -// A count of the number of values changed and any error are returned. -// If the count == 0, then no path (and subkeys) matched. -// 'newVal' can be a Map or map[string]interface{} value with a single 'key' that is the key to be modified -// or a string value "key:value[:type]" where type is "bool" or "num" to cast the value. -// 'path' is dot-notation list of keys to traverse; last key in path can be newVal key -// NOTE: 'path' spec does not currently support indexed array references. -// 'subkeys' are "key:value[:type]" entries that must match for path node -// The subkey can be wildcarded - "key:*" - to require that it's there with some value. -// If a subkey is preceeded with the '!' character, the key:value[:type] entry is treated as an -// exclusion critera - e.g., "!author:William T. Gaddis". -// -// NOTES: -// 1. Simple elements with attributes need a path terminated as ".#text" to modify the actual value. -// 2. Values in Maps created using NewMapXmlSeq are map[string]interface{} values with a "#text" key. -// 3. If values in 'newVal' or 'subkeys' args contain ":", use SetFieldSeparator to an unused symbol, -// perhaps "|". -func (mv Map) UpdateValuesForPath(newVal interface{}, path string, subkeys ...string) (int, error) { - m := map[string]interface{}(mv) - - // extract the subkeys - var subKeyMap map[string]interface{} - if len(subkeys) > 0 { - var err error - subKeyMap, err = getSubKeyMap(subkeys...) 
- if err != nil { - return 0, err - } - } - - // extract key and value from newVal - var key string - var val interface{} - switch newVal.(type) { - case map[string]interface{}, Map: - switch newVal.(type) { // "fallthrough is not permitted in type switch" (Spec) - case Map: - newVal = newVal.(Map).Old() - } - if len(newVal.(map[string]interface{})) != 1 { - return 0, fmt.Errorf("newVal map can only have len == 1 - %+v", newVal) - } - for key, val = range newVal.(map[string]interface{}) { - } - case string: // split it as a key:value pair - ss := strings.Split(newVal.(string), fieldSep) - n := len(ss) - if n < 2 || n > 3 { - return 0, fmt.Errorf("unknown newVal spec - %+v", newVal) - } - key = ss[0] - if n == 2 { - val = interface{}(ss[1]) - } else if n == 3 { - switch ss[2] { - case "bool", "boolean": - nv, err := strconv.ParseBool(ss[1]) - if err != nil { - return 0, fmt.Errorf("can't convert newVal to bool - %+v", newVal) - } - val = interface{}(nv) - case "num", "numeric", "float", "int": - nv, err := strconv.ParseFloat(ss[1], 64) - if err != nil { - return 0, fmt.Errorf("can't convert newVal to float64 - %+v", newVal) - } - val = interface{}(nv) - default: - return 0, fmt.Errorf("unknown type for newVal value - %+v", newVal) - } - } - default: - return 0, fmt.Errorf("invalid newVal type - %+v", newVal) - } - - // parse path - keys := strings.Split(path, ".") - - var count int - updateValuesForKeyPath(key, val, m, keys, subKeyMap, &count) - - return count, nil -} - -// navigate the path -func updateValuesForKeyPath(key string, value interface{}, m interface{}, keys []string, subkeys map[string]interface{}, cnt *int) { - // ----- at end node: looking at possible node to get 'key' ---- - if len(keys) == 1 { - updateValue(key, value, m, keys[0], subkeys, cnt) - return - } - - // ----- here we are navigating the path thru the penultimate node -------- - // key of interest is keys[0] - the next in the path - switch keys[0] { - case "*": // wildcard - scan all values - switch m.(type) { - case map[string]interface{}: - for _, v := range m.(map[string]interface{}) { - updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) - } - case []interface{}: - for _, v := range m.([]interface{}) { - switch v.(type) { - // flatten out a list of maps - keys are processed - case map[string]interface{}: - for _, vv := range v.(map[string]interface{}) { - updateValuesForKeyPath(key, value, vv, keys[1:], subkeys, cnt) - } - default: - updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) - } - } - } - default: // key - must be map[string]interface{} - switch m.(type) { - case map[string]interface{}: - if v, ok := m.(map[string]interface{})[keys[0]]; ok { - updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) - } - case []interface{}: // may be buried in list - for _, v := range m.([]interface{}) { - switch v.(type) { - case map[string]interface{}: - if vv, ok := v.(map[string]interface{})[keys[0]]; ok { - updateValuesForKeyPath(key, value, vv, keys[1:], subkeys, cnt) - } - } - } - } - } -} - -// change value if key and subkeys are present -func updateValue(key string, value interface{}, m interface{}, keys0 string, subkeys map[string]interface{}, cnt *int) { - // there are two possible options for the value of 'keys0': map[string]interface, []interface{} - // and 'key' is a key in the map or is a key in a map in a list. 
- switch m.(type) { - case map[string]interface{}: // gotta have the last key - if keys0 == "*" { - for k := range m.(map[string]interface{}) { - updateValue(key, value, m, k, subkeys, cnt) - } - return - } - endVal, _ := m.(map[string]interface{})[keys0] - - // if newV key is the end of path, replace the value for path-end - // may be []interface{} - means replace just an entry w/ subkeys - // otherwise replace the keys0 value if subkeys are there - // NOTE: this will replace the subkeys, also - if key == keys0 { - switch endVal.(type) { - case map[string]interface{}: - if hasSubKeys(m, subkeys) { - (m.(map[string]interface{}))[keys0] = value - (*cnt)++ - } - case []interface{}: - // without subkeys can't select list member to modify - // so key:value spec is it ... - if hasSubKeys(m, subkeys) { - (m.(map[string]interface{}))[keys0] = value - (*cnt)++ - break - } - nv := make([]interface{}, 0) - var valmodified bool - for _, v := range endVal.([]interface{}) { - // check entry subkeys - if hasSubKeys(v, subkeys) { - // replace v with value - nv = append(nv, value) - valmodified = true - (*cnt)++ - continue - } - nv = append(nv, v) - } - if valmodified { - (m.(map[string]interface{}))[keys0] = interface{}(nv) - } - default: // anything else is a strict replacement - if hasSubKeys(m, subkeys) { - (m.(map[string]interface{}))[keys0] = value - (*cnt)++ - } - } - return - } - - // so value is for an element of endVal - // if endVal is a map then 'key' must be there w/ subkeys - // if endVal is a list then 'key' must be in a list member w/ subkeys - switch endVal.(type) { - case map[string]interface{}: - if !hasSubKeys(endVal, subkeys) { - return - } - if _, ok := (endVal.(map[string]interface{}))[key]; ok { - (endVal.(map[string]interface{}))[key] = value - (*cnt)++ - } - case []interface{}: // keys0 points to a list, check subkeys - for _, v := range endVal.([]interface{}) { - // got to be a map so we can replace value for 'key' - vv, vok := v.(map[string]interface{}) - if !vok { - continue - } - if _, ok := vv[key]; !ok { - continue - } - if !hasSubKeys(vv, subkeys) { - continue - } - vv[key] = value - (*cnt)++ - } - } - case []interface{}: // key may be in a list member - // don't need to handle keys0 == "*"; we're looking at everything, anyway. - for _, v := range m.([]interface{}) { - // only map values - we're looking for 'key' - mm, ok := v.(map[string]interface{}) - if !ok { - continue - } - if _, ok := mm[key]; !ok { - continue - } - if !hasSubKeys(mm, subkeys) { - continue - } - mm[key] = value - (*cnt)++ - } - } - - // return -} diff --git a/vendor/github.com/clbanning/mxj/xml.go b/vendor/github.com/clbanning/mxj/xml.go deleted file mode 100644 index fac0f1d3bb5..00000000000 --- a/vendor/github.com/clbanning/mxj/xml.go +++ /dev/null @@ -1,1139 +0,0 @@ -// Copyright 2012-2016 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// xml.go - basically the core of X2j for map[string]interface{} values. -// NewMapXml, NewMapXmlReader, mv.Xml, mv.XmlWriter -// see x2j and j2x for wrappers to provide end-to-end transformation of XML and JSON messages. - -package mxj - -import ( - "bytes" - "encoding/json" - "encoding/xml" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strconv" - "strings" - "time" -) - -// ------------------- NewMapXml & NewMapXmlReader ... ------------------------- - -// If XmlCharsetReader != nil, it will be used to decode the XML, if required. 
-// Note: if CustomDecoder != nil, then XmlCharsetReader is ignored; -// set the CustomDecoder attribute instead. -// import ( -// charset "code.google.com/p/go-charset/charset" -// github.com/clbanning/mxj -// ) -// ... -// mxj.XmlCharsetReader = charset.NewReader -// m, merr := mxj.NewMapXml(xmlValue) -var XmlCharsetReader func(charset string, input io.Reader) (io.Reader, error) - -// NewMapXml - convert a XML doc into a Map -// (This is analogous to unmarshalling a JSON string to map[string]interface{} using json.Unmarshal().) -// If the optional argument 'cast' is 'true', then values will be converted to boolean or float64 if possible. -// -// Converting XML to JSON is a simple as: -// ... -// mapVal, merr := mxj.NewMapXml(xmlVal) -// if merr != nil { -// // handle error -// } -// jsonVal, jerr := mapVal.Json() -// if jerr != nil { -// // handle error -// } -// -// NOTES: -// 1. The 'xmlVal' will be parsed looking for an xml.StartElement, so BOM and other -// extraneous xml.CharData will be ignored unless io.EOF is reached first. -// 2. If CoerceKeysToLower() has been called, then all key values will be lower case. -// 3. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. -func NewMapXml(xmlVal []byte, cast ...bool) (Map, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - return xmlToMap(xmlVal, r) -} - -// Get next XML doc from an io.Reader as a Map value. Returns Map value. -// NOTES: -// 1. The 'xmlReader' will be parsed looking for an xml.StartElement, so BOM and other -// extraneous xml.CharData will be ignored unless io.EOF is reached first. -// 2. If CoerceKeysToLower() has been called, then all key values will be lower case. -// 3. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. -func NewMapXmlReader(xmlReader io.Reader, cast ...bool) (Map, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - - // We need to put an *os.File reader in a ByteReader or the xml.NewDecoder - // will wrap it in a bufio.Reader and seek on the file beyond where the - // xml.Decoder parses! - if _, ok := xmlReader.(io.ByteReader); !ok { - xmlReader = myByteReader(xmlReader) // see code at EOF - } - - // build the map - return xmlReaderToMap(xmlReader, r) -} - -// Get next XML doc from an io.Reader as a Map value. Returns Map value and slice with the raw XML. -// NOTES: -// 1. Due to the implementation of xml.Decoder, the raw XML off the reader is buffered to []byte -// using a ByteReader. If the io.Reader is an os.File, there may be significant performance impact. -// See the examples - getmetrics1.go through getmetrics4.go - for comparative use cases on a large -// data set. If the io.Reader is wrapping a []byte value in-memory, however, such as http.Request.Body -// you CAN use it to efficiently unmarshal a XML doc and retrieve the raw XML in a single call. -// 2. The 'raw' return value may be larger than the XML text value. -// 3. The 'xmlReader' will be parsed looking for an xml.StartElement, so BOM and other -// extraneous xml.CharData will be ignored unless io.EOF is reached first. -// 4. If CoerceKeysToLower() has been called, then all key values will be lower case. -// 5. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. 
-func NewMapXmlReaderRaw(xmlReader io.Reader, cast ...bool) (Map, []byte, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - // create TeeReader so we can retrieve raw XML - buf := make([]byte, 0) - wb := bytes.NewBuffer(buf) - trdr := myTeeReader(xmlReader, wb) // see code at EOF - - m, err := xmlReaderToMap(trdr, r) - - // retrieve the raw XML that was decoded - b := wb.Bytes() - - if err != nil { - return nil, b, err - } - - return m, b, nil -} - -// xmlReaderToMap() - parse a XML io.Reader to a map[string]interface{} value -func xmlReaderToMap(rdr io.Reader, r bool) (map[string]interface{}, error) { - // parse the Reader - p := xml.NewDecoder(rdr) - if CustomDecoder != nil { - useCustomDecoder(p) - } else { - p.CharsetReader = XmlCharsetReader - } - return xmlToMapParser("", nil, p, r) -} - -// xmlToMap - convert a XML doc into map[string]interface{} value -func xmlToMap(doc []byte, r bool) (map[string]interface{}, error) { - b := bytes.NewReader(doc) - p := xml.NewDecoder(b) - if CustomDecoder != nil { - useCustomDecoder(p) - } else { - p.CharsetReader = XmlCharsetReader - } - return xmlToMapParser("", nil, p, r) -} - -// ===================================== where the work happens ============================= - -// PrependAttrWithHyphen. Prepend attribute tags with a hyphen. -// Default is 'true'. (Not applicable to NewMapXmlSeq(), mv.XmlSeq(), etc.) -// Note: -// If 'false', unmarshaling and marshaling is not symmetric. Attributes will be -// marshal'd as attr and may be part of a list. -func PrependAttrWithHyphen(v bool) { - if v { - attrPrefix = "-" - lenAttrPrefix = len(attrPrefix) - return - } - attrPrefix = "" - lenAttrPrefix = len(attrPrefix) -} - -// Include sequence id with inner tags. - per Sean Murphy, murphysean84@gmail.com. -var includeTagSeqNum bool - -// IncludeTagSeqNum - include a "_seq":N key:value pair with each inner tag, denoting -// its position when parsed. This is of limited usefulness, since list values cannot -// be tagged with "_seq" without changing their depth in the Map. -// So THIS SHOULD BE USED WITH CAUTION - see the test cases. Here's a sample of what -// you get. -/* - - - - - hello - - - parses as: - - { - Obj:{ - "-c":"la", - "-h":"da", - "-x":"dee", - "intObj":[ - { - "-id"="3", - "_seq":"0" // if mxj.Cast is passed, then: "_seq":0 - }, - { - "-id"="2", - "_seq":"2" - }], - "intObj1":{ - "-id":"1", - "_seq":"1" - }, - "StrObj":{ - "#text":"hello", // simple element value gets "#text" tag - "_seq":"3" - } - } - } -*/ -func IncludeTagSeqNum(b bool) { - includeTagSeqNum = b -} - -// all keys will be "lower case" -var lowerCase bool - -// Coerce all tag values to keys in lower case. This is useful if you've got sources with variable -// tag capitalization, and you want to use m.ValuesForKeys(), etc., with the key or path spec -// in lower case. -// CoerceKeysToLower() will toggle the coercion flag true|false - on|off -// CoerceKeysToLower(true|false) will set the coercion flag on|off -// -// NOTE: only recognized by NewMapXml, NewMapXmlReader, and NewMapXmlReaderRaw functions as well as -// the associated HandleXmlReader and HandleXmlReaderRaw. -func CoerceKeysToLower(b ...bool) { - if len(b) == 0 { - lowerCase = !lowerCase - } else if len(b) == 1 { - lowerCase = b[0] - } -} - -// 25jun16: Allow user to specify the "prefix" character for XML attribute key labels. -// We do this by replacing '`' constant with attrPrefix var, replacing useHyphen with attrPrefix = "", -// and adding a SetAttrPrefix(s string) function. 
- -var attrPrefix string = `-` // the default -var lenAttrPrefix int = 1 // the default - -// SetAttrPrefix changes the default, "-", to the specified value, s. -// SetAttrPrefix("") is the same as PrependAttrWithHyphen(false). -// (Not applicable for NewMapXmlSeq(), mv.XmlSeq(), etc.) -func SetAttrPrefix(s string) { - attrPrefix = s - lenAttrPrefix = len(attrPrefix) -} - -// 18jan17: Allows user to specify if the map keys should be in snake case instead -// of the default hyphenated notation. -var snakeCaseKeys bool - -// CoerceKeysToSnakeCase changes the default, false, to the specified value, b. -// Note: the attribute prefix will be a hyphen, '-', or what ever string value has -// been specified using SetAttrPrefix. -func CoerceKeysToSnakeCase(b ...bool) { - if len(b) == 0 { - snakeCaseKeys = !snakeCaseKeys - } else if len(b) == 1 { - snakeCaseKeys = b[0] - } -} - -// 05feb17: support processing XMPP streams (issue #36) -var handleXMPPStreamTag bool - -// HandleXMPPStreamTag causes decoder to parse XMPP elements. -// If called with no argument, XMPP stream element handling is toggled on/off. -// (See xmppStream_test.go for example.) -// If called with NewMapXml, NewMapXmlReader, New MapXmlReaderRaw the "stream" -// element will be returned as: -// map["stream"]interface{}{map[-]interface{}}. -// If called with NewMapSeq, NewMapSeqReader, NewMapSeqReaderRaw the "stream" -// element will be returned as: -// map["stream:stream"]interface{}{map["#attr"]interface{}{map[string]interface{}}} -// where the "#attr" values have "#text" and "#seq" keys. (See NewMapXmlSeq.) -func HandleXMPPStreamTag(b ...bool) { - if len(b) == 0 { - handleXMPPStreamTag = !handleXMPPStreamTag - } else if len(b) == 1 { - handleXMPPStreamTag = b[0] - } -} - -// 21jan18 - decode all values as map["#text":value] (issue #56) -var decodeSimpleValuesAsMap bool - -// DecodeSimpleValuesAsMap forces all values to be decoded as map["#text":]. -// If called with no argument, the decoding is toggled on/off. -// -// By default the NewMapXml functions decode simple values without attributes as -// map[:]. This function causes simple values without attributes to be -// decoded the same as simple values with attributes - map[:map["#text":]]. -func DecodeSimpleValuesAsMap(b ...bool) { - if len(b) == 0 { - decodeSimpleValuesAsMap = !decodeSimpleValuesAsMap - } else if len(b) == 1 { - decodeSimpleValuesAsMap = b[0] - } -} - -// xmlToMapParser (2015.11.12) - load a 'clean' XML doc into a map[string]interface{} directly. -// A refactoring of xmlToTreeParser(), markDuplicate() and treeToMap() - here, all-in-one. -// We've removed the intermediate *node tree with the allocation and subsequent rescanning. -func xmlToMapParser(skey string, a []xml.Attr, p *xml.Decoder, r bool) (map[string]interface{}, error) { - if lowerCase { - skey = strings.ToLower(skey) - } - if snakeCaseKeys { - skey = strings.Replace(skey, "-", "_", -1) - } - - // NOTE: all attributes and sub-elements parsed into 'na', 'na' is returned as value for 'skey' in 'n'. - // Unless 'skey' is a simple element w/o attributes, in which case the xml.CharData value is the value. - var n, na map[string]interface{} - var seq int // for includeTagSeqNum - - // Allocate maps and load attributes, if any. - // NOTE: on entry from NewMapXml(), etc., skey=="", and we fall through - // to get StartElement then recurse with skey==xml.StartElement.Name.Local - // where we begin allocating map[string]interface{} values 'n' and 'na'. 
- if skey != "" { - n = make(map[string]interface{}) // old n - na = make(map[string]interface{}) // old n.nodes - if len(a) > 0 { - for _, v := range a { - if snakeCaseKeys { - v.Name.Local = strings.Replace(v.Name.Local, "-", "_", -1) - } - var key string - key = attrPrefix + v.Name.Local - if lowerCase { - key = strings.ToLower(key) - } - na[key] = cast(v.Value, r) - } - } - } - // Return XMPP message. - if handleXMPPStreamTag && skey == "stream" { - n[skey] = na - return n, nil - } - - for { - t, err := p.Token() - if err != nil { - if err != io.EOF { - return nil, errors.New("xml.Decoder.Token() - " + err.Error()) - } - return nil, err - } - switch t.(type) { - case xml.StartElement: - tt := t.(xml.StartElement) - - // First call to xmlToMapParser() doesn't pass xml.StartElement - the map key. - // So when the loop is first entered, the first token is the root tag along - // with any attributes, which we process here. - // - // Subsequent calls to xmlToMapParser() will pass in tag+attributes for - // processing before getting the next token which is the element value, - // which is done above. - if skey == "" { - return xmlToMapParser(tt.Name.Local, tt.Attr, p, r) - } - - // If not initializing the map, parse the element. - // len(nn) == 1, necessarily - it is just an 'n'. - nn, err := xmlToMapParser(tt.Name.Local, tt.Attr, p, r) - if err != nil { - return nil, err - } - - // The nn map[string]interface{} value is a na[nn_key] value. - // We need to see if nn_key already exists - means we're parsing a list. - // This may require converting na[nn_key] value into []interface{} type. - // First, extract the key:val for the map - it's a singleton. - // Note: - // * if CoerceKeysToLower() called, then key will be lower case. - // * if CoerceKeysToSnakeCase() called, then key will be converted to snake case. - var key string - var val interface{} - for key, val = range nn { - break - } - - // IncludeTagSeqNum requests that the element be augmented with a "_seq" sub-element. - // In theory, we don't need this if len(na) == 1. But, we don't know what might - // come next - we're only parsing forward. So if you ask for 'includeTagSeqNum' you - // get it on every element. (Personally, I never liked this, but I added it on request - // and did get a $50 Amazon gift card in return - now we support it for backwards compatibility!) - if includeTagSeqNum { - switch val.(type) { - case []interface{}: - // noop - There's no clean way to handle this w/o changing message structure. - case map[string]interface{}: - val.(map[string]interface{})["_seq"] = seq // will overwrite an "_seq" XML tag - seq++ - case interface{}: // a non-nil simple element: string, float64, bool - v := map[string]interface{}{"#text": val} - v["_seq"] = seq - seq++ - val = v - } - } - - // 'na' holding sub-elements of n. - // See if 'key' already exists. - // If 'key' exists, then this is a list, if not just add key:val to na. - if v, ok := na[key]; ok { - var a []interface{} - switch v.(type) { - case []interface{}: - a = v.([]interface{}) - default: // anything else - note: v.(type) != nil - a = []interface{}{v} - } - a = append(a, val) - na[key] = a - } else { - na[key] = val // save it as a singleton - } - case xml.EndElement: - // len(n) > 0 if this is a simple element w/o xml.Attrs - see xml.CharData case. - if len(n) == 0 { - // If len(na)==0 we have an empty element == ""; - // it has no xml.Attr nor xml.CharData. 
- // Note: in original node-tree parser, val defaulted to ""; - // so we always had the default if len(node.nodes) == 0. - if len(na) > 0 { - n[skey] = na - } else { - n[skey] = "" // empty element - } - } - return n, nil - case xml.CharData: - // clean up possible noise - tt := strings.Trim(string(t.(xml.CharData)), "\t\r\b\n ") - if len(tt) > 0 { - if len(na) > 0 || decodeSimpleValuesAsMap { - na["#text"] = cast(tt, r) - } else if skey != "" { - n[skey] = cast(tt, r) - } else { - // per Adrian (http://www.adrianlungu.com/) catch stray text - // in decoder stream - - // https://github.com/clbanning/mxj/pull/14#issuecomment-182816374 - // NOTE: CharSetReader must be set to non-UTF-8 CharSet or you'll get - // a p.Token() decoding error when the BOM is UTF-16 or UTF-32. - continue - } - } - default: - // noop - } - } -} - -var castNanInf bool - -// Cast "Nan", "Inf", "-Inf" XML values to 'float64'. -// By default, these values will be decoded as 'string'. -func CastNanInf(b bool) { - castNanInf = b -} - -// cast - try to cast string values to bool or float64 -func cast(s string, r bool) interface{} { - if r { - // handle nan and inf - if !castNanInf { - switch strings.ToLower(s) { - case "nan", "inf", "-inf": - return s - } - } - - // handle numeric strings ahead of boolean - if f, err := strconv.ParseFloat(s, 64); err == nil { - return f - } - // ParseBool treats "1"==true & "0"==false, we've already scanned those - // values as float64. See if value has 't' or 'f' as initial screen to - // minimize calls to ParseBool; also, see if len(s) < 6. - if len(s) > 0 && len(s) < 6 { - switch s[:1] { - case "t", "T", "f", "F": - if b, err := strconv.ParseBool(s); err == nil { - return b - } - } - } - } - return s -} - -// ------------------ END: NewMapXml & NewMapXmlReader ------------------------- - -// ------------------ mv.Xml & mv.XmlWriter - from j2x ------------------------ - -const ( - DefaultRootTag = "doc" -) - -var useGoXmlEmptyElemSyntax bool - -// XmlGoEmptyElemSyntax() - rather than . -// Go's encoding/xml package marshals empty XML elements as . By default this package -// encodes empty elements as . If you're marshaling Map values that include structures -// (which are passed to xml.Marshal for encoding), this will let you conform to the standard package. -func XmlGoEmptyElemSyntax() { - useGoXmlEmptyElemSyntax = true -} - -// XmlDefaultEmptyElemSyntax() - rather than . -// Return XML encoding for empty elements to the default package setting. -// Reverses effect of XmlGoEmptyElemSyntax(). -func XmlDefaultEmptyElemSyntax() { - useGoXmlEmptyElemSyntax = false -} - -// Encode a Map as XML. The companion of NewMapXml(). -// The following rules apply. -// - The key label "#text" is treated as the value for a simple element with attributes. -// - Map keys that begin with a hyphen, '-', are interpreted as attributes. -// It is an error if the attribute doesn't have a []byte, string, number, or boolean value. -// - Map value type encoding: -// > string, bool, float64, int, int32, int64, float32: per "%v" formating -// > []bool, []uint8: by casting to string -// > structures, etc.: handed to xml.Marshal() - if there is an error, the element -// value is "UNKNOWN" -// - Elements with only attribute values or are null are terminated using "/>". -// - If len(mv) == 1 and no rootTag is provided, then the map key is used as the root tag, possible. -// Thus, `{ "key":"value" }` encodes as "value". 
-// - To encode empty elements in a syntax consistent with encoding/xml call UseGoXmlEmptyElementSyntax(). -// The attributes tag=value pairs are alphabetized by "tag". Also, when encoding map[string]interface{} values - -// complex elements, etc. - the key:value pairs are alphabetized by key so the resulting tags will appear sorted. -func (mv Map) Xml(rootTag ...string) ([]byte, error) { - m := map[string]interface{}(mv) - var err error - s := new(string) - p := new(pretty) // just a stub - - if len(m) == 1 && len(rootTag) == 0 { - for key, value := range m { - // if it an array, see if all values are map[string]interface{} - // we force a new root tag if we'll end up with no key:value in the list - // so: key:[string_val, bool:true] --> string_valtrue - switch value.(type) { - case []interface{}: - for _, v := range value.([]interface{}) { - switch v.(type) { - case map[string]interface{}: // noop - default: // anything else - err = mapToXmlIndent(false, s, DefaultRootTag, m, p) - goto done - } - } - } - err = mapToXmlIndent(false, s, key, value, p) - } - } else if len(rootTag) == 1 { - err = mapToXmlIndent(false, s, rootTag[0], m, p) - } else { - err = mapToXmlIndent(false, s, DefaultRootTag, m, p) - } -done: - return []byte(*s), err -} - -// The following implementation is provided only for symmetry with NewMapXmlReader[Raw] -// The names will also provide a key for the number of return arguments. - -// Writes the Map as XML on the Writer. -// See Xml() for encoding rules. -func (mv Map) XmlWriter(xmlWriter io.Writer, rootTag ...string) error { - x, err := mv.Xml(rootTag...) - if err != nil { - return err - } - - _, err = xmlWriter.Write(x) - return err -} - -// Writes the Map as XML on the Writer. []byte is the raw XML that was written. -// See Xml() for encoding rules. -func (mv Map) XmlWriterRaw(xmlWriter io.Writer, rootTag ...string) ([]byte, error) { - x, err := mv.Xml(rootTag...) - if err != nil { - return x, err - } - - _, err = xmlWriter.Write(x) - return x, err -} - -// Writes the Map as pretty XML on the Writer. -// See Xml() for encoding rules. -func (mv Map) XmlIndentWriter(xmlWriter io.Writer, prefix, indent string, rootTag ...string) error { - x, err := mv.XmlIndent(prefix, indent, rootTag...) - if err != nil { - return err - } - - _, err = xmlWriter.Write(x) - return err -} - -// Writes the Map as pretty XML on the Writer. []byte is the raw XML that was written. -// See Xml() for encoding rules. -func (mv Map) XmlIndentWriterRaw(xmlWriter io.Writer, prefix, indent string, rootTag ...string) ([]byte, error) { - x, err := mv.XmlIndent(prefix, indent, rootTag...) - if err != nil { - return x, err - } - - _, err = xmlWriter.Write(x) - return x, err -} - -// -------------------- END: mv.Xml & mv.XmlWriter ------------------------------- - -// -------------- Handle XML stream by processing Map value -------------------- - -// Default poll delay to keep Handler from spinning on an open stream -// like sitting on os.Stdin waiting for imput. -var xhandlerPollInterval = time.Millisecond - -// Bulk process XML using handlers that process a Map value. -// 'rdr' is an io.Reader for XML (stream) -// 'mapHandler' is the Map processor. Return of 'false' stops io.Reader processing. -// 'errHandler' is the error processor. Return of 'false' stops io.Reader processing and returns the error. -// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. 
-// This means that you can stop reading the file on error or after processing a particular message. -// To have reading and handling run concurrently, pass argument to a go routine in handler and return 'true'. -func HandleXmlReader(xmlReader io.Reader, mapHandler func(Map) bool, errHandler func(error) bool) error { - var n int - for { - m, merr := NewMapXmlReader(xmlReader) - n++ - - // handle error condition with errhandler - if merr != nil && merr != io.EOF { - merr = fmt.Errorf("[xmlReader: %d] %s", n, merr.Error()) - if ok := errHandler(merr); !ok { - // caused reader termination - return merr - } - continue - } - - // pass to maphandler - if len(m) != 0 { - if ok := mapHandler(m); !ok { - break - } - } else if merr != io.EOF { - time.Sleep(xhandlerPollInterval) - } - - if merr == io.EOF { - break - } - } - return nil -} - -// Bulk process XML using handlers that process a Map value and the raw XML. -// 'rdr' is an io.Reader for XML (stream) -// 'mapHandler' is the Map and raw XML - []byte - processor. Return of 'false' stops io.Reader processing. -// 'errHandler' is the error and raw XML processor. Return of 'false' stops io.Reader processing and returns the error. -// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. -// This means that you can stop reading the file on error or after processing a particular message. -// To have reading and handling run concurrently, pass argument(s) to a go routine in handler and return 'true'. -// See NewMapXmlReaderRaw for comment on performance associated with retrieving raw XML from a Reader. -func HandleXmlReaderRaw(xmlReader io.Reader, mapHandler func(Map, []byte) bool, errHandler func(error, []byte) bool) error { - var n int - for { - m, raw, merr := NewMapXmlReaderRaw(xmlReader) - n++ - - // handle error condition with errhandler - if merr != nil && merr != io.EOF { - merr = fmt.Errorf("[xmlReader: %d] %s", n, merr.Error()) - if ok := errHandler(merr, raw); !ok { - // caused reader termination - return merr - } - continue - } - - // pass to maphandler - if len(m) != 0 { - if ok := mapHandler(m, raw); !ok { - break - } - } else if merr != io.EOF { - time.Sleep(xhandlerPollInterval) - } - - if merr == io.EOF { - break - } - } - return nil -} - -// ----------------- END: Handle XML stream by processing Map value -------------- - -// -------- a hack of io.TeeReader ... need one that's an io.ByteReader for xml.NewDecoder() ---------- - -// This is a clone of io.TeeReader with the additional method t.ReadByte(). -// Thus, this TeeReader is also an io.ByteReader. -// This is necessary because xml.NewDecoder uses a ByteReader not a Reader. It appears to have been written -// with bufio.Reader or bytes.Reader in mind ... not a generic io.Reader, which doesn't have to have ReadByte().. -// If NewDecoder is passed a Reader that does not satisfy ByteReader() it wraps the Reader with -// bufio.NewReader and uses ReadByte rather than Read that runs the TeeReader pipe logic. - -type teeReader struct { - r io.Reader - w io.Writer - b []byte -} - -func myTeeReader(r io.Reader, w io.Writer) io.Reader { - b := make([]byte, 1) - return &teeReader{r, w, b} -} - -// need for io.Reader - but we don't use it ... 
-func (t *teeReader) Read(p []byte) (int, error) { - return 0, nil -} - -func (t *teeReader) ReadByte() (byte, error) { - n, err := t.r.Read(t.b) - if n > 0 { - if _, err := t.w.Write(t.b[:1]); err != nil { - return t.b[0], err - } - } - return t.b[0], err -} - -// For use with NewMapXmlReader & NewMapXmlSeqReader. -type byteReader struct { - r io.Reader - b []byte -} - -func myByteReader(r io.Reader) io.Reader { - b := make([]byte, 1) - return &byteReader{r, b} -} - -// Need for io.Reader interface ... -// Needed if reading a malformed http.Request.Body - issue #38. -func (b *byteReader) Read(p []byte) (int, error) { - return b.r.Read(p) -} - -func (b *byteReader) ReadByte() (byte, error) { - _, err := b.r.Read(b.b) - if len(b.b) > 0 { - return b.b[0], err - } - var c byte - return c, err -} - -// ----------------------- END: io.TeeReader hack ----------------------------------- - -// ---------------------- XmlIndent - from j2x package ---------------------------- - -// Encode a map[string]interface{} as a pretty XML string. -// See Xml for encoding rules. -func (mv Map) XmlIndent(prefix, indent string, rootTag ...string) ([]byte, error) { - m := map[string]interface{}(mv) - - var err error - s := new(string) - p := new(pretty) - p.indent = indent - p.padding = prefix - - if len(m) == 1 && len(rootTag) == 0 { - // this can extract the key for the single map element - // use it if it isn't a key for a list - for key, value := range m { - if _, ok := value.([]interface{}); ok { - err = mapToXmlIndent(true, s, DefaultRootTag, m, p) - } else { - err = mapToXmlIndent(true, s, key, value, p) - } - } - } else if len(rootTag) == 1 { - err = mapToXmlIndent(true, s, rootTag[0], m, p) - } else { - err = mapToXmlIndent(true, s, DefaultRootTag, m, p) - } - return []byte(*s), err -} - -type pretty struct { - indent string - cnt int - padding string - mapDepth int - start int -} - -func (p *pretty) Indent() { - p.padding += p.indent - p.cnt++ -} - -func (p *pretty) Outdent() { - if p.cnt > 0 { - p.padding = p.padding[:len(p.padding)-len(p.indent)] - p.cnt-- - } -} - -// where the work actually happens -// returns an error if an attribute is not atomic -func mapToXmlIndent(doIndent bool, s *string, key string, value interface{}, pp *pretty) error { - var endTag bool - var isSimple bool - var elen int - p := &pretty{pp.indent, pp.cnt, pp.padding, pp.mapDepth, pp.start} - - // per issue #48, 18apr18 - try and coerce maps to map[string]interface{} - // Don't need for mapToXmlSeqIndent, since maps there are decoded by NewMapXmlSeq(). 
- if reflect.ValueOf(value).Kind() == reflect.Map { - switch value.(type) { - case map[string]interface{}: - default: - val := make(map[string]interface{}) - vv := reflect.ValueOf(value) - keys := vv.MapKeys() - for _, k := range keys { - val[fmt.Sprint(k)] = vv.MapIndex(k).Interface() - } - value = val - } - } - - switch value.(type) { - // special handling of []interface{} values when len(value) == 0 - case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32, json.Number: - if doIndent { - *s += p.padding - } - *s += `<` + key - } - switch value.(type) { - case map[string]interface{}: - vv := value.(map[string]interface{}) - lenvv := len(vv) - // scan out attributes - attribute keys have prepended attrPrefix - attrlist := make([][2]string, len(vv)) - var n int - var ss string - for k, v := range vv { - if lenAttrPrefix > 0 && lenAttrPrefix < len(k) && k[:lenAttrPrefix] == attrPrefix { - switch v.(type) { - case string: - if xmlEscapeChars { - ss = escapeChars(v.(string)) - } else { - ss = v.(string) - } - attrlist[n][0] = k[lenAttrPrefix:] - attrlist[n][1] = ss - case float64, bool, int, int32, int64, float32, json.Number: - attrlist[n][0] = k[lenAttrPrefix:] - attrlist[n][1] = fmt.Sprintf("%v", v) - case []byte: - if xmlEscapeChars { - ss = escapeChars(string(v.([]byte))) - } else { - ss = string(v.([]byte)) - } - attrlist[n][0] = k[lenAttrPrefix:] - attrlist[n][1] = ss - default: - return fmt.Errorf("invalid attribute value for: %s:<%T>", k, v) - } - n++ - } - } - if n > 0 { - attrlist = attrlist[:n] - sort.Sort(attrList(attrlist)) - for _, v := range attrlist { - *s += ` ` + v[0] + `="` + v[1] + `"` - } - } - // only attributes? - if n == lenvv { - if useGoXmlEmptyElemSyntax { - *s += `" - } else { - *s += `/>` - } - break - } - - // simple element? Note: '#text" is an invalid XML tag. - if v, ok := vv["#text"]; ok && n+1 == lenvv { - switch v.(type) { - case string: - if xmlEscapeChars { - v = escapeChars(v.(string)) - } else { - v = v.(string) - } - case []byte: - if xmlEscapeChars { - v = escapeChars(string(v.([]byte))) - } - } - *s += ">" + fmt.Sprintf("%v", v) - endTag = true - elen = 1 - isSimple = true - break - } else if ok { - // Handle edge case where simple element with attributes - // is unmarshal'd using NewMapXml() where attribute prefix - // has been set to "". - // TODO(clb): should probably scan all keys for invalid chars. - return fmt.Errorf("invalid attribute key label: #text - due to attributes not being prefixed") - } - - // close tag with possible attributes - *s += ">" - if doIndent { - *s += "\n" - } - // something more complex - p.mapDepth++ - // extract the map k:v pairs and sort on key - elemlist := make([][2]interface{}, len(vv)) - n = 0 - for k, v := range vv { - if lenAttrPrefix > 0 && lenAttrPrefix < len(k) && k[:lenAttrPrefix] == attrPrefix { - continue - } - elemlist[n][0] = k - elemlist[n][1] = v - n++ - } - elemlist = elemlist[:n] - sort.Sort(elemList(elemlist)) - var i int - for _, v := range elemlist { - switch v[1].(type) { - case []interface{}: - default: - if i == 0 && doIndent { - p.Indent() - } - } - i++ - if err := mapToXmlIndent(doIndent, s, v[0].(string), v[1], p); err != nil { - return err - } - switch v[1].(type) { - case []interface{}: // handled in []interface{} case - default: - if doIndent { - p.Outdent() - } - } - i-- - } - p.mapDepth-- - endTag = true - elen = 1 // we do have some content ... 
- case []interface{}: - // special case - found during implementing Issue #23 - if len(value.([]interface{})) == 0 { - if doIndent { - *s += p.padding + p.indent - } - *s += "<" + key - elen = 0 - endTag = true - break - } - for _, v := range value.([]interface{}) { - if doIndent { - p.Indent() - } - if err := mapToXmlIndent(doIndent, s, key, v, p); err != nil { - return err - } - if doIndent { - p.Outdent() - } - } - return nil - case []string: - // This was added by https://github.com/slotix ... not a type that - // would be encountered if mv generated from NewMapXml, NewMapJson. - // Could be encountered in AnyXml(), so we'll let it stay, though - // it should be merged with case []interface{}, above. - //quick fix for []string type - //[]string should be treated exaclty as []interface{} - if len(value.([]string)) == 0 { - if doIndent { - *s += p.padding + p.indent - } - *s += "<" + key - elen = 0 - endTag = true - break - } - for _, v := range value.([]string) { - if doIndent { - p.Indent() - } - if err := mapToXmlIndent(doIndent, s, key, v, p); err != nil { - return err - } - if doIndent { - p.Outdent() - } - } - return nil - case nil: - // terminate the tag - if doIndent { - *s += p.padding - } - *s += "<" + key - endTag, isSimple = true, true - break - default: // handle anything - even goofy stuff - elen = 0 - switch value.(type) { - case string: - v := value.(string) - if xmlEscapeChars { - v = escapeChars(v) - } - elen = len(v) - if elen > 0 { - *s += ">" + v - } - case float64, bool, int, int32, int64, float32, json.Number: - v := fmt.Sprintf("%v", value) - elen = len(v) // always > 0 - *s += ">" + v - case []byte: // NOTE: byte is just an alias for uint8 - // similar to how xml.Marshal handles []byte structure members - v := string(value.([]byte)) - if xmlEscapeChars { - v = escapeChars(v) - } - elen = len(v) - if elen > 0 { - *s += ">" + v - } - default: - var v []byte - var err error - if doIndent { - v, err = xml.MarshalIndent(value, p.padding, p.indent) - } else { - v, err = xml.Marshal(value) - } - if err != nil { - *s += ">UNKNOWN" - } else { - elen = len(v) - if elen > 0 { - *s += string(v) - } - } - } - isSimple = true - endTag = true - } - if endTag { - if doIndent { - if !isSimple { - *s += p.padding - } - } - if elen > 0 || useGoXmlEmptyElemSyntax { - if elen == 0 { - *s += ">" - } - *s += `" - } else { - *s += `/>` - } - } - if doIndent { - if p.cnt > p.start { - *s += "\n" - } - p.Outdent() - } - - return nil -} - -// ============================ sort interface implementation ================= - -type attrList [][2]string - -func (a attrList) Len() int { - return len(a) -} - -func (a attrList) Swap(i, j int) { - a[i], a[j] = a[j], a[i] -} - -func (a attrList) Less(i, j int) bool { - return a[i][0] <= a[j][0] -} - -type elemList [][2]interface{} - -func (e elemList) Len() int { - return len(e) -} - -func (e elemList) Swap(i, j int) { - e[i], e[j] = e[j], e[i] -} - -func (e elemList) Less(i, j int) bool { - return e[i][0].(string) <= e[j][0].(string) -} diff --git a/vendor/github.com/clbanning/mxj/xmlseq.go b/vendor/github.com/clbanning/mxj/xmlseq.go deleted file mode 100644 index 6be73ae60d9..00000000000 --- a/vendor/github.com/clbanning/mxj/xmlseq.go +++ /dev/null @@ -1,828 +0,0 @@ -// Copyright 2012-2016 Charles Banning. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file - -// xmlseq.go - version of xml.go with sequence # injection on Decoding and sorting on Encoding. 
-// Also, handles comments, directives and process instructions. - -package mxj - -import ( - "bytes" - "encoding/xml" - "errors" - "fmt" - "io" - "sort" - "strings" -) - -var NoRoot = errors.New("no root key") -var NO_ROOT = NoRoot // maintain backwards compatibility - -// ------------------- NewMapXmlSeq & NewMapXmlSeqReader ... ------------------------- - -// This is only useful if you want to re-encode the Map as XML using mv.XmlSeq(), etc., to preserve the original structure. -// The xml.Decoder.RawToken method is used to parse the XML, so there is no checking for appropriate xml.EndElement values; -// thus, it is assumed that the XML is valid. -// -// NewMapXmlSeq - convert a XML doc into a Map with elements id'd with decoding sequence int - #seq. -// If the optional argument 'cast' is 'true', then values will be converted to boolean or float64 if possible. -// NOTE: "#seq" key/value pairs are removed on encoding with mv.XmlSeq() / mv.XmlSeqIndent(). -// • attributes are a map - map["#attr"]map["attr_key"]map[string]interface{}{"#text":, "#seq":} -// • all simple elements are decoded as map["#text"]interface{} with a "#seq" k:v pair, as well. -// • lists always decode as map["list_tag"][]map[string]interface{} where the array elements are maps that -// include a "#seq" k:v pair based on sequence they are decoded. Thus, XML like: -// -// value 1 -// value 2 -// value 3 -// -// is decoded as: -// doc : -// ltag :[[]interface{}] -// [item: 0] -// #seq :[int] 0 -// #text :[string] value 1 -// [item: 1] -// #seq :[int] 2 -// #text :[string] value 3 -// newtag : -// #seq :[int] 1 -// #text :[string] value 2 -// It will encode in proper sequence even though the Map representation merges all "ltag" elements in an array. -// • comments - "" - are decoded as map["#comment"]map["#text"]"cmnt_text" with a "#seq" k:v pair. -// • directives - "" - are decoded as map["#directive"]map[#text"]"directive_text" with a "#seq" k:v pair. -// • process instructions - "" - are decoded as map["#procinst"]interface{} where the #procinst value -// is of map[string]interface{} type with the following keys: #target, #inst, and #seq. -// • comments, directives, and procinsts that are NOT part of a document with a root key will be returned as -// map[string]interface{} and the error value 'NoRoot'. -// • note: ": tag preserve the -// ":" notation rather than stripping it as with NewMapXml(). -// 2. Attribute keys for name space prefix declarations preserve "xmlns:" notation. -func NewMapXmlSeq(xmlVal []byte, cast ...bool) (Map, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - return xmlSeqToMap(xmlVal, r) -} - -// This is only useful if you want to re-encode the Map as XML using mv.XmlSeq(), etc., to preserve the original structure. -// -// Get next XML doc from an io.Reader as a Map value. Returns Map value. -// NOTES: -// 1. The 'xmlReader' will be parsed looking for an xml.StartElement, xml.Comment, etc., so BOM and other -// extraneous xml.CharData will be ignored unless io.EOF is reached first. -// 2. CoerceKeysToLower() is NOT recognized, since the intent here is to eventually call m.XmlSeq() to -// re-encode the message in its original structure. -// 3. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. 
-func NewMapXmlSeqReader(xmlReader io.Reader, cast ...bool) (Map, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - - // We need to put an *os.File reader in a ByteReader or the xml.NewDecoder - // will wrap it in a bufio.Reader and seek on the file beyond where the - // xml.Decoder parses! - if _, ok := xmlReader.(io.ByteReader); !ok { - xmlReader = myByteReader(xmlReader) // see code at EOF - } - - // build the map - return xmlSeqReaderToMap(xmlReader, r) -} - -// This is only useful if you want to re-encode the Map as XML using mv.XmlSeq(), etc., to preserve the original structure. -// -// Get next XML doc from an io.Reader as a Map value. Returns Map value and slice with the raw XML. -// NOTES: -// 1. Due to the implementation of xml.Decoder, the raw XML off the reader is buffered to []byte -// using a ByteReader. If the io.Reader is an os.File, there may be significant performance impact. -// See the examples - getmetrics1.go through getmetrics4.go - for comparative use cases on a large -// data set. If the io.Reader is wrapping a []byte value in-memory, however, such as http.Request.Body -// you CAN use it to efficiently unmarshal a XML doc and retrieve the raw XML in a single call. -// 2. The 'raw' return value may be larger than the XML text value. -// 3. The 'xmlReader' will be parsed looking for an xml.StartElement, xml.Comment, etc., so BOM and other -// extraneous xml.CharData will be ignored unless io.EOF is reached first. -// 4. CoerceKeysToLower() is NOT recognized, since the intent here is to eventually call m.XmlSeq() to -// re-encode the message in its original structure. -// 5. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. -func NewMapXmlSeqReaderRaw(xmlReader io.Reader, cast ...bool) (Map, []byte, error) { - var r bool - if len(cast) == 1 { - r = cast[0] - } - // create TeeReader so we can retrieve raw XML - buf := make([]byte, 0) - wb := bytes.NewBuffer(buf) - trdr := myTeeReader(xmlReader, wb) - - m, err := xmlSeqReaderToMap(trdr, r) - - // retrieve the raw XML that was decoded - b := wb.Bytes() - - // err may be NoRoot - return m, b, err -} - -// xmlSeqReaderToMap() - parse a XML io.Reader to a map[string]interface{} value -func xmlSeqReaderToMap(rdr io.Reader, r bool) (map[string]interface{}, error) { - // parse the Reader - p := xml.NewDecoder(rdr) - if CustomDecoder != nil { - useCustomDecoder(p) - } else { - p.CharsetReader = XmlCharsetReader - } - return xmlSeqToMapParser("", nil, p, r) -} - -// xmlSeqToMap - convert a XML doc into map[string]interface{} value -func xmlSeqToMap(doc []byte, r bool) (map[string]interface{}, error) { - b := bytes.NewReader(doc) - p := xml.NewDecoder(b) - if CustomDecoder != nil { - useCustomDecoder(p) - } else { - p.CharsetReader = XmlCharsetReader - } - return xmlSeqToMapParser("", nil, p, r) -} - -// ===================================== where the work happens ============================= - -// xmlSeqToMapParser - load a 'clean' XML doc into a map[string]interface{} directly. -// Add #seq tag value for each element decoded - to be used for Encoding later. -func xmlSeqToMapParser(skey string, a []xml.Attr, p *xml.Decoder, r bool) (map[string]interface{}, error) { - if snakeCaseKeys { - skey = strings.Replace(skey, "-", "_", -1) - } - - // NOTE: all attributes and sub-elements parsed into 'na', 'na' is returned as value for 'skey' in 'n'. 
- var n, na map[string]interface{} - var seq int // for including seq num when decoding - - // Allocate maps and load attributes, if any. - // NOTE: on entry from NewMapXml(), etc., skey=="", and we fall through - // to get StartElement then recurse with skey==xml.StartElement.Name.Local - // where we begin allocating map[string]interface{} values 'n' and 'na'. - if skey != "" { - // 'n' only needs one slot - save call to runtime•hashGrow() - // 'na' we don't know - n = make(map[string]interface{}, 1) - na = make(map[string]interface{}) - if len(a) > 0 { - // xml.Attr is decoded into: map["#attr"]map[]interface{} - // where interface{} is map[string]interface{}{"#text":, "#seq":} - aa := make(map[string]interface{}, len(a)) - for i, v := range a { - if snakeCaseKeys { - v.Name.Local = strings.Replace(v.Name.Local, "-", "_", -1) - } - if len(v.Name.Space) > 0 { - aa[v.Name.Space+`:`+v.Name.Local] = map[string]interface{}{"#text": cast(v.Value, r), "#seq": i} - } else { - aa[v.Name.Local] = map[string]interface{}{"#text": cast(v.Value, r), "#seq": i} - } - } - na["#attr"] = aa - } - } - - // Return XMPP message. - if handleXMPPStreamTag && skey == "stream:stream" { - n[skey] = na - return n, nil - } - - for { - t, err := p.RawToken() - if err != nil { - if err != io.EOF { - return nil, errors.New("xml.Decoder.Token() - " + err.Error()) - } - return nil, err - } - switch t.(type) { - case xml.StartElement: - tt := t.(xml.StartElement) - - // First call to xmlSeqToMapParser() doesn't pass xml.StartElement - the map key. - // So when the loop is first entered, the first token is the root tag along - // with any attributes, which we process here. - // - // Subsequent calls to xmlSeqToMapParser() will pass in tag+attributes for - // processing before getting the next token which is the element value, - // which is done above. - if skey == "" { - if len(tt.Name.Space) > 0 { - return xmlSeqToMapParser(tt.Name.Space+`:`+tt.Name.Local, tt.Attr, p, r) - } else { - return xmlSeqToMapParser(tt.Name.Local, tt.Attr, p, r) - } - } - - // If not initializing the map, parse the element. - // len(nn) == 1, necessarily - it is just an 'n'. - var nn map[string]interface{} - if len(tt.Name.Space) > 0 { - nn, err = xmlSeqToMapParser(tt.Name.Space+`:`+tt.Name.Local, tt.Attr, p, r) - } else { - nn, err = xmlSeqToMapParser(tt.Name.Local, tt.Attr, p, r) - } - if err != nil { - return nil, err - } - - // The nn map[string]interface{} value is a na[nn_key] value. - // We need to see if nn_key already exists - means we're parsing a list. - // This may require converting na[nn_key] value into []interface{} type. - // First, extract the key:val for the map - it's a singleton. - var key string - var val interface{} - for key, val = range nn { - break - } - - // add "#seq" k:v pair - - // Sequence number included even in list elements - this should allow us - // to properly resequence even something goofy like: - // item 1 - // item 2 - // item 3 - // where all the "list" subelements are decoded into an array. - switch val.(type) { - case map[string]interface{}: - val.(map[string]interface{})["#seq"] = seq - seq++ - case interface{}: // a non-nil simple element: string, float64, bool - v := map[string]interface{}{"#text": val, "#seq": seq} - seq++ - val = v - } - - // 'na' holding sub-elements of n. - // See if 'key' already exists. - // If 'key' exists, then this is a list, if not just add key:val to na. 
- if v, ok := na[key]; ok { - var a []interface{} - switch v.(type) { - case []interface{}: - a = v.([]interface{}) - default: // anything else - note: v.(type) != nil - a = []interface{}{v} - } - a = append(a, val) - na[key] = a - } else { - na[key] = val // save it as a singleton - } - case xml.EndElement: - if skey != "" { - tt := t.(xml.EndElement) - if snakeCaseKeys { - tt.Name.Local = strings.Replace(tt.Name.Local, "-", "_", -1) - } - var name string - if len(tt.Name.Space) > 0 { - name = tt.Name.Space + `:` + tt.Name.Local - } else { - name = tt.Name.Local - } - if skey != name { - return nil, fmt.Errorf("element %s not properly terminated, got %s at #%d", - skey, name, p.InputOffset()) - } - } - // len(n) > 0 if this is a simple element w/o xml.Attrs - see xml.CharData case. - if len(n) == 0 { - // If len(na)==0 we have an empty element == ""; - // it has no xml.Attr nor xml.CharData. - // Empty element content will be map["etag"]map["#text"]"" - // after #seq injection - map["etag"]map["#seq"]seq - after return. - if len(na) > 0 { - n[skey] = na - } else { - n[skey] = "" // empty element - } - } - return n, nil - case xml.CharData: - // clean up possible noise - tt := strings.Trim(string(t.(xml.CharData)), "\t\r\b\n ") - if skey == "" { - // per Adrian (http://www.adrianlungu.com/) catch stray text - // in decoder stream - - // https://github.com/clbanning/mxj/pull/14#issuecomment-182816374 - // NOTE: CharSetReader must be set to non-UTF-8 CharSet or you'll get - // a p.Token() decoding error when the BOM is UTF-16 or UTF-32. - continue - } - if len(tt) > 0 { - // every simple element is a #text and has #seq associated with it - na["#text"] = cast(tt, r) - na["#seq"] = seq - seq++ - } - case xml.Comment: - if n == nil { // no root 'key' - n = map[string]interface{}{"#comment": string(t.(xml.Comment))} - return n, NoRoot - } - cm := make(map[string]interface{}, 2) - cm["#text"] = string(t.(xml.Comment)) - cm["#seq"] = seq - seq++ - na["#comment"] = cm - case xml.Directive: - if n == nil { // no root 'key' - n = map[string]interface{}{"#directive": string(t.(xml.Directive))} - return n, NoRoot - } - dm := make(map[string]interface{}, 2) - dm["#text"] = string(t.(xml.Directive)) - dm["#seq"] = seq - seq++ - na["#directive"] = dm - case xml.ProcInst: - if n == nil { - na = map[string]interface{}{"#target": t.(xml.ProcInst).Target, "#inst": string(t.(xml.ProcInst).Inst)} - n = map[string]interface{}{"#procinst": na} - return n, NoRoot - } - pm := make(map[string]interface{}, 3) - pm["#target"] = t.(xml.ProcInst).Target - pm["#inst"] = string(t.(xml.ProcInst).Inst) - pm["#seq"] = seq - seq++ - na["#procinst"] = pm - default: - // noop - shouldn't ever get here, now, since we handle all token types - } - } -} - -// ------------------ END: NewMapXml & NewMapXmlReader ------------------------- - -// --------------------- mv.XmlSeq & mv.XmlSeqWriter ------------------------- - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Encode a Map as XML with elements sorted on #seq. The companion of NewMapXmlSeq(). -// The following rules apply. -// - The key label "#text" is treated as the value for a simple element with attributes. -// - The "#seq" key is used to seqence the subelements or attributes but is ignored for writing. -// - The "#attr" map key identifies the map of attribute map[string]interface{} values with "#text" key. -// - The "#comment" map key identifies a comment in the value "#text" map entry - . 
-// - The "#directive" map key identifies a directive in the value "#text" map entry - . -// - The "#procinst" map key identifies a process instruction in the value "#target" and "#inst" -// map entries - . -// - Value type encoding: -// > string, bool, float64, int, int32, int64, float32: per "%v" formating -// > []bool, []uint8: by casting to string -// > structures, etc.: handed to xml.Marshal() - if there is an error, the element -// value is "UNKNOWN" -// - Elements with only attribute values or are null are terminated using "/>" unless XmlGoEmptyElemSystax() called. -// - If len(mv) == 1 and no rootTag is provided, then the map key is used as the root tag, possible. -// Thus, `{ "key":"value" }` encodes as "value". -func (mv Map) XmlSeq(rootTag ...string) ([]byte, error) { - m := map[string]interface{}(mv) - var err error - s := new(string) - p := new(pretty) // just a stub - - if len(m) == 1 && len(rootTag) == 0 { - for key, value := range m { - // if it's an array, see if all values are map[string]interface{} - // we force a new root tag if we'll end up with no key:value in the list - // so: key:[string_val, bool:true] --> string_valtrue - switch value.(type) { - case []interface{}: - for _, v := range value.([]interface{}) { - switch v.(type) { - case map[string]interface{}: // noop - default: // anything else - err = mapToXmlSeqIndent(false, s, DefaultRootTag, m, p) - goto done - } - } - } - err = mapToXmlSeqIndent(false, s, key, value, p) - } - } else if len(rootTag) == 1 { - err = mapToXmlSeqIndent(false, s, rootTag[0], m, p) - } else { - err = mapToXmlSeqIndent(false, s, DefaultRootTag, m, p) - } -done: - return []byte(*s), err -} - -// The following implementation is provided only for symmetry with NewMapXmlReader[Raw] -// The names will also provide a key for the number of return arguments. - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Writes the Map as XML on the Writer. -// See XmlSeq() for encoding rules. -func (mv Map) XmlSeqWriter(xmlWriter io.Writer, rootTag ...string) error { - x, err := mv.XmlSeq(rootTag...) - if err != nil { - return err - } - - _, err = xmlWriter.Write(x) - return err -} - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Writes the Map as XML on the Writer. []byte is the raw XML that was written. -// See XmlSeq() for encoding rules. -func (mv Map) XmlSeqWriterRaw(xmlWriter io.Writer, rootTag ...string) ([]byte, error) { - x, err := mv.XmlSeq(rootTag...) - if err != nil { - return x, err - } - - _, err = xmlWriter.Write(x) - return x, err -} - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Writes the Map as pretty XML on the Writer. -// See Xml() for encoding rules. -func (mv Map) XmlSeqIndentWriter(xmlWriter io.Writer, prefix, indent string, rootTag ...string) error { - x, err := mv.XmlSeqIndent(prefix, indent, rootTag...) - if err != nil { - return err - } - - _, err = xmlWriter.Write(x) - return err -} - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Writes the Map as pretty XML on the Writer. []byte is the raw XML that was written. -// See XmlSeq() for encoding rules. -func (mv Map) XmlSeqIndentWriterRaw(xmlWriter io.Writer, prefix, indent string, rootTag ...string) ([]byte, error) { - x, err := mv.XmlSeqIndent(prefix, indent, rootTag...) 
- if err != nil { - return x, err - } - - _, err = xmlWriter.Write(x) - return x, err -} - -// -------------------- END: mv.Xml & mv.XmlWriter ------------------------------- - -// ---------------------- XmlSeqIndent ---------------------------- - -// This should ONLY be used on Map values that were decoded using NewMapXmlSeq() & co. -// -// Encode a map[string]interface{} as a pretty XML string. -// See mv.XmlSeq() for encoding rules. -func (mv Map) XmlSeqIndent(prefix, indent string, rootTag ...string) ([]byte, error) { - m := map[string]interface{}(mv) - - var err error - s := new(string) - p := new(pretty) - p.indent = indent - p.padding = prefix - - if len(m) == 1 && len(rootTag) == 0 { - // this can extract the key for the single map element - // use it if it isn't a key for a list - for key, value := range m { - if _, ok := value.([]interface{}); ok { - err = mapToXmlSeqIndent(true, s, DefaultRootTag, m, p) - } else { - err = mapToXmlSeqIndent(true, s, key, value, p) - } - } - } else if len(rootTag) == 1 { - err = mapToXmlSeqIndent(true, s, rootTag[0], m, p) - } else { - err = mapToXmlSeqIndent(true, s, DefaultRootTag, m, p) - } - return []byte(*s), err -} - -// where the work actually happens -// returns an error if an attribute is not atomic -func mapToXmlSeqIndent(doIndent bool, s *string, key string, value interface{}, pp *pretty) error { - var endTag bool - var isSimple bool - var noEndTag bool - var elen int - var ss string - p := &pretty{pp.indent, pp.cnt, pp.padding, pp.mapDepth, pp.start} - - switch value.(type) { - case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32: - if doIndent { - *s += p.padding - } - if key != "#comment" && key != "#directive" && key != "#procinst" { - *s += `<` + key - } - } - switch value.(type) { - case map[string]interface{}: - val := value.(map[string]interface{}) - - if key == "#comment" { - *s += `` - noEndTag = true - break - } - - if key == "#directive" { - *s += `` - noEndTag = true - break - } - - if key == "#procinst" { - *s += `` - noEndTag = true - break - } - - haveAttrs := false - // process attributes first - if v, ok := val["#attr"].(map[string]interface{}); ok { - // First, unroll the map[string]interface{} into a []keyval array. - // Then sequence it. - kv := make([]keyval, len(v)) - n := 0 - for ak, av := range v { - kv[n] = keyval{ak, av} - n++ - } - sort.Sort(elemListSeq(kv)) - // Now encode the attributes in original decoding sequence, using keyval array. - for _, a := range kv { - vv := a.v.(map[string]interface{}) - switch vv["#text"].(type) { - case string: - if xmlEscapeChars { - ss = escapeChars(vv["#text"].(string)) - } else { - ss = vv["#text"].(string) - } - *s += ` ` + a.k + `="` + ss + `"` - case float64, bool, int, int32, int64, float32: - *s += ` ` + a.k + `="` + fmt.Sprintf("%v", vv["#text"]) + `"` - case []byte: - if xmlEscapeChars { - ss = escapeChars(string(vv["#text"].([]byte))) - } else { - ss = string(vv["#text"].([]byte)) - } - *s += ` ` + a.k + `="` + ss + `"` - default: - return fmt.Errorf("invalid attribute value for: %s", a.k) - } - } - haveAttrs = true - } - - // simple element? 
- // every map value has, at least, "#seq" and, perhaps, "#text" and/or "#attr" - _, seqOK := val["#seq"] // have key - if v, ok := val["#text"]; ok && ((len(val) == 3 && haveAttrs) || (len(val) == 2 && !haveAttrs)) && seqOK { - if stmp, ok := v.(string); ok && stmp != "" { - if xmlEscapeChars { - stmp = escapeChars(stmp) - } - *s += ">" + stmp - endTag = true - elen = 1 - } - isSimple = true - break - } else if !ok && ((len(val) == 2 && haveAttrs) || (len(val) == 1 && !haveAttrs)) && seqOK { - // here no #text but have #seq or #seq+#attr - endTag = false - break - } - - // we now need to sequence everything except attributes - // 'kv' will hold everything that needs to be written - kv := make([]keyval, 0) - for k, v := range val { - if k == "#attr" { // already processed - continue - } - if k == "#seq" { // ignore - just for sorting - continue - } - switch v.(type) { - case []interface{}: - // unwind the array as separate entries - for _, vv := range v.([]interface{}) { - kv = append(kv, keyval{k, vv}) - } - default: - kv = append(kv, keyval{k, v}) - } - } - - // close tag with possible attributes - *s += ">" - if doIndent { - *s += "\n" - } - // something more complex - p.mapDepth++ - sort.Sort(elemListSeq(kv)) - i := 0 - for _, v := range kv { - switch v.v.(type) { - case []interface{}: - default: - if i == 0 && doIndent { - p.Indent() - } - } - i++ - if err := mapToXmlSeqIndent(doIndent, s, v.k, v.v, p); err != nil { - return err - } - switch v.v.(type) { - case []interface{}: // handled in []interface{} case - default: - if doIndent { - p.Outdent() - } - } - i-- - } - p.mapDepth-- - endTag = true - elen = 1 // we do have some content other than attrs - case []interface{}: - for _, v := range value.([]interface{}) { - if doIndent { - p.Indent() - } - if err := mapToXmlSeqIndent(doIndent, s, key, v, p); err != nil { - return err - } - if doIndent { - p.Outdent() - } - } - return nil - case nil: - // terminate the tag - if doIndent { - *s += p.padding - } - *s += "<" + key - endTag, isSimple = true, true - break - default: // handle anything - even goofy stuff - elen = 0 - switch value.(type) { - case string: - if xmlEscapeChars { - ss = escapeChars(value.(string)) - } else { - ss = value.(string) - } - elen = len(ss) - if elen > 0 { - *s += ">" + ss - } - case float64, bool, int, int32, int64, float32: - v := fmt.Sprintf("%v", value) - elen = len(v) - if elen > 0 { - *s += ">" + v - } - case []byte: // NOTE: byte is just an alias for uint8 - // similar to how xml.Marshal handles []byte structure members - if xmlEscapeChars { - ss = escapeChars(string(value.([]byte))) - } else { - ss = string(value.([]byte)) - } - elen = len(ss) - if elen > 0 { - *s += ">" + ss - } - default: - var v []byte - var err error - if doIndent { - v, err = xml.MarshalIndent(value, p.padding, p.indent) - } else { - v, err = xml.Marshal(value) - } - if err != nil { - *s += ">UNKNOWN" - } else { - elen = len(v) - if elen > 0 { - *s += string(v) - } - } - } - isSimple = true - endTag = true - } - if endTag && !noEndTag { - if doIndent { - if !isSimple { - *s += p.padding - } - } - switch value.(type) { - case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32: - if elen > 0 || useGoXmlEmptyElemSyntax { - if elen == 0 { - *s += ">" - } - *s += `" - } else { - *s += `/>` - } - } - } else if !noEndTag { - if useGoXmlEmptyElemSyntax { - *s += `" - // *s += ">" - } else { - *s += "/>" - } - } - if doIndent { - if p.cnt > p.start { - *s += "\n" - } - p.Outdent() - } - - return nil -} - -// 
the element sort implementation - -type keyval struct { - k string - v interface{} -} -type elemListSeq []keyval - -func (e elemListSeq) Len() int { - return len(e) -} - -func (e elemListSeq) Swap(i, j int) { - e[i], e[j] = e[j], e[i] -} - -func (e elemListSeq) Less(i, j int) bool { - var iseq, jseq int - var ok bool - if iseq, ok = e[i].v.(map[string]interface{})["#seq"].(int); !ok { - iseq = 9999999 - } - - if jseq, ok = e[j].v.(map[string]interface{})["#seq"].(int); !ok { - jseq = 9999999 - } - - return iseq <= jseq -} - -// =============== https://groups.google.com/forum/#!topic/golang-nuts/lHPOHD-8qio - -// BeautifyXml (re)formats an XML doc similar to Map.XmlIndent(). -func BeautifyXml(b []byte, prefix, indent string) ([]byte, error) { - x, err := NewMapXmlSeq(b) - if err != nil { - return nil, err - } - return x.XmlSeqIndent(prefix, indent) -} diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE deleted file mode 100644 index bc52e96f2b0..00000000000 --- a/vendor/github.com/davecgh/go-spew/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -ISC License - -Copyright (c) 2012-2016 Dave Collins - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go deleted file mode 100644 index 792994785e3..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright (c) 2015-2016 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is not running on Google App Engine, compiled by GopherJS, and -// "-tags safe" is not added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. -// Go versions prior to 1.4 are disabled because they use a different layout -// for interfaces which make the implementation of unsafeReflectValue more complex. 
-// +build !js,!appengine,!safe,!disableunsafe,go1.4 - -package spew - -import ( - "reflect" - "unsafe" -) - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = false - - // ptrSize is the size of a pointer on the current arch. - ptrSize = unsafe.Sizeof((*byte)(nil)) -) - -type flag uintptr - -var ( - // flagRO indicates whether the value field of a reflect.Value - // is read-only. - flagRO flag - - // flagAddr indicates whether the address of the reflect.Value's - // value may be taken. - flagAddr flag -) - -// flagKindMask holds the bits that make up the kind -// part of the flags field. In all the supported versions, -// it is in the lower 5 bits. -const flagKindMask = flag(0x1f) - -// Different versions of Go have used different -// bit layouts for the flags type. This table -// records the known combinations. -var okFlags = []struct { - ro, addr flag -}{{ - // From Go 1.4 to 1.5 - ro: 1 << 5, - addr: 1 << 7, -}, { - // Up to Go tip. - ro: 1<<5 | 1<<6, - addr: 1 << 8, -}} - -var flagValOffset = func() uintptr { - field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") - if !ok { - panic("reflect.Value has no flag field") - } - return field.Offset -}() - -// flagField returns a pointer to the flag field of a reflect.Value. -func flagField(v *reflect.Value) *flag { - return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset)) -} - -// unsafeReflectValue converts the passed reflect.Value into a one that bypasses -// the typical safety restrictions preventing access to unaddressable and -// unexported data. It works by digging the raw pointer to the underlying -// value out of the protected value and generating a new unprotected (unsafe) -// reflect.Value to it. -// -// This allows us to check for implementations of the Stringer and error -// interfaces to be used for pretty printing ordinarily unaddressable and -// inaccessible values such as unexported struct fields. -func unsafeReflectValue(v reflect.Value) reflect.Value { - if !v.IsValid() || (v.CanInterface() && v.CanAddr()) { - return v - } - flagFieldPtr := flagField(&v) - *flagFieldPtr &^= flagRO - *flagFieldPtr |= flagAddr - return v -} - -// Sanity checks against future reflect package changes -// to the type or semantics of the Value.flag field. -func init() { - field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") - if !ok { - panic("reflect.Value has no flag field") - } - if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() { - panic("reflect.Value flag field has changed kind") - } - type t0 int - var t struct { - A t0 - // t0 will have flagEmbedRO set. - t0 - // a will have flagStickyRO set - a t0 - } - vA := reflect.ValueOf(t).FieldByName("A") - va := reflect.ValueOf(t).FieldByName("a") - vt0 := reflect.ValueOf(t).FieldByName("t0") - - // Infer flagRO from the difference between the flags - // for the (otherwise identical) fields in t. - flagPublic := *flagField(&vA) - flagWithRO := *flagField(&va) | *flagField(&vt0) - flagRO = flagPublic ^ flagWithRO - - // Infer flagAddr from the difference between a value - // taken from a pointer and not. - vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A") - flagNoPtr := *flagField(&vA) - flagPtr := *flagField(&vPtrA) - flagAddr = flagNoPtr ^ flagPtr - - // Check that the inferred flags tally with one of the known versions. 
- for _, f := range okFlags { - if flagRO == f.ro && flagAddr == f.addr { - return - } - } - panic("reflect.Value read-only flag has changed semantics") -} diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go deleted file mode 100644 index 205c28d68c4..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) 2015-2016 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is running on Google App Engine, compiled by GopherJS, or -// "-tags safe" is added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. -// +build js appengine safe disableunsafe !go1.4 - -package spew - -import "reflect" - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = true -) - -// unsafeReflectValue typically converts the passed reflect.Value into a one -// that bypasses the typical safety restrictions preventing access to -// unaddressable and unexported data. However, doing this relies on access to -// the unsafe package. This is a stub version which simply returns the passed -// reflect.Value when the unsafe package is not available. -func unsafeReflectValue(v reflect.Value) reflect.Value { - return v -} diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go deleted file mode 100644 index 1be8ce94576..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/common.go +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "reflect" - "sort" - "strconv" -) - -// Some constants in the form of bytes to avoid string overhead. This mirrors -// the technique used in the fmt package. 
-var (
- panicBytes = []byte("(PANIC=")
- plusBytes = []byte("+")
- iBytes = []byte("i")
- trueBytes = []byte("true")
- falseBytes = []byte("false")
- interfaceBytes = []byte("(interface {})")
- commaNewlineBytes = []byte(",\n")
- newlineBytes = []byte("\n")
- openBraceBytes = []byte("{")
- openBraceNewlineBytes = []byte("{\n")
- closeBraceBytes = []byte("}")
- asteriskBytes = []byte("*")
- colonBytes = []byte(":")
- colonSpaceBytes = []byte(": ")
- openParenBytes = []byte("(")
- closeParenBytes = []byte(")")
- spaceBytes = []byte(" ")
- pointerChainBytes = []byte("->")
- nilAngleBytes = []byte("<nil>")
- maxNewlineBytes = []byte("<max depth reached>\n")
- maxShortBytes = []byte("<max>")
- circularBytes = []byte("<already shown>")
- circularShortBytes = []byte("<shown>")
- invalidAngleBytes = []byte("<invalid>")
- openBracketBytes = []byte("[")
- closeBracketBytes = []byte("]")
- percentBytes = []byte("%")
- precisionBytes = []byte(".")
- openAngleBytes = []byte("<")
- closeAngleBytes = []byte(">")
- openMapBytes = []byte("map[")
- closeMapBytes = []byte("]")
- lenEqualsBytes = []byte("len=")
- capEqualsBytes = []byte("cap=")
-)
-
-// hexDigits is used to map a decimal value to a hex digit.
-var hexDigits = "0123456789abcdef"
-
-// catchPanic handles any panics that might occur during the handleMethods
-// calls.
-func catchPanic(w io.Writer, v reflect.Value) {
- if err := recover(); err != nil {
- w.Write(panicBytes)
- fmt.Fprintf(w, "%v", err)
- w.Write(closeParenBytes)
- }
-}
-
-// handleMethods attempts to call the Error and String methods on the underlying
-// type the passed reflect.Value represents and outputes the result to Writer w.
-//
-// It handles panics in any called methods by catching and displaying the error
-// as the formatted value.
-func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) {
- // We need an interface to check if the type implements the error or
- // Stringer interface. However, the reflect package won't give us an
- // interface on certain things like unexported struct fields in order
- // to enforce visibility rules. We use unsafe, when it's available,
- // to bypass these restrictions since this package does not mutate the
- // values.
- if !v.CanInterface() {
- if UnsafeDisabled {
- return false
- }
-
- v = unsafeReflectValue(v)
- }
-
- // Choose whether or not to do error and Stringer interface lookups against
- // the base type or a pointer to the base type depending on settings.
- // Technically calling one of these methods with a pointer receiver can
- // mutate the value, however, types which choose to satisify an error or
- // Stringer interface with a pointer receiver should not be mutating their
- // state inside these interface methods.
- if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() {
- v = unsafeReflectValue(v)
- }
- if v.CanAddr() {
- v = v.Addr()
- }
-
- // Is it an error or Stringer?
- switch iface := v.Interface().(type) {
- case error:
- defer catchPanic(w, v)
- if cs.ContinueOnMethod {
- w.Write(openParenBytes)
- w.Write([]byte(iface.Error()))
- w.Write(closeParenBytes)
- w.Write(spaceBytes)
- return false
- }
-
- w.Write([]byte(iface.Error()))
- return true
-
- case fmt.Stringer:
- defer catchPanic(w, v)
- if cs.ContinueOnMethod {
- w.Write(openParenBytes)
- w.Write([]byte(iface.String()))
- w.Write(closeParenBytes)
- w.Write(spaceBytes)
- return false
- }
- w.Write([]byte(iface.String()))
- return true
- }
- return false
-}
-
-// printBool outputs a boolean value as true or false to Writer w. 
-func printBool(w io.Writer, val bool) { - if val { - w.Write(trueBytes) - } else { - w.Write(falseBytes) - } -} - -// printInt outputs a signed integer value to Writer w. -func printInt(w io.Writer, val int64, base int) { - w.Write([]byte(strconv.FormatInt(val, base))) -} - -// printUint outputs an unsigned integer value to Writer w. -func printUint(w io.Writer, val uint64, base int) { - w.Write([]byte(strconv.FormatUint(val, base))) -} - -// printFloat outputs a floating point value using the specified precision, -// which is expected to be 32 or 64bit, to Writer w. -func printFloat(w io.Writer, val float64, precision int) { - w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision))) -} - -// printComplex outputs a complex value using the specified float precision -// for the real and imaginary parts to Writer w. -func printComplex(w io.Writer, c complex128, floatPrecision int) { - r := real(c) - w.Write(openParenBytes) - w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision))) - i := imag(c) - if i >= 0 { - w.Write(plusBytes) - } - w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision))) - w.Write(iBytes) - w.Write(closeParenBytes) -} - -// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x' -// prefix to Writer w. -func printHexPtr(w io.Writer, p uintptr) { - // Null pointer. - num := uint64(p) - if num == 0 { - w.Write(nilAngleBytes) - return - } - - // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix - buf := make([]byte, 18) - - // It's simpler to construct the hex string right to left. - base := uint64(16) - i := len(buf) - 1 - for num >= base { - buf[i] = hexDigits[num%base] - num /= base - i-- - } - buf[i] = hexDigits[num] - - // Add '0x' prefix. - i-- - buf[i] = 'x' - i-- - buf[i] = '0' - - // Strip unused leading bytes. - buf = buf[i:] - w.Write(buf) -} - -// valuesSorter implements sort.Interface to allow a slice of reflect.Value -// elements to be sorted. -type valuesSorter struct { - values []reflect.Value - strings []string // either nil or same len and values - cs *ConfigState -} - -// newValuesSorter initializes a valuesSorter instance, which holds a set of -// surrogate keys on which the data should be sorted. It uses flags in -// ConfigState to decide if and how to populate those surrogate keys. -func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface { - vs := &valuesSorter{values: values, cs: cs} - if canSortSimply(vs.values[0].Kind()) { - return vs - } - if !cs.DisableMethods { - vs.strings = make([]string, len(values)) - for i := range vs.values { - b := bytes.Buffer{} - if !handleMethods(cs, &b, vs.values[i]) { - vs.strings = nil - break - } - vs.strings[i] = b.String() - } - } - if vs.strings == nil && cs.SpewKeys { - vs.strings = make([]string, len(values)) - for i := range vs.values { - vs.strings[i] = Sprintf("%#v", vs.values[i].Interface()) - } - } - return vs -} - -// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted -// directly, or whether it should be considered for sorting by surrogate keys -// (if the ConfigState allows it). -func canSortSimply(kind reflect.Kind) bool { - // This switch parallels valueSortLess, except for the default case. 
- switch kind { - case reflect.Bool: - return true - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return true - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return true - case reflect.Float32, reflect.Float64: - return true - case reflect.String: - return true - case reflect.Uintptr: - return true - case reflect.Array: - return true - } - return false -} - -// Len returns the number of values in the slice. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Len() int { - return len(s.values) -} - -// Swap swaps the values at the passed indices. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Swap(i, j int) { - s.values[i], s.values[j] = s.values[j], s.values[i] - if s.strings != nil { - s.strings[i], s.strings[j] = s.strings[j], s.strings[i] - } -} - -// valueSortLess returns whether the first value should sort before the second -// value. It is used by valueSorter.Less as part of the sort.Interface -// implementation. -func valueSortLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Bool: - return !a.Bool() && b.Bool() - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return a.Int() < b.Int() - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return a.Uint() < b.Uint() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.String: - return a.String() < b.String() - case reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Array: - // Compare the contents of both arrays. - l := a.Len() - for i := 0; i < l; i++ { - av := a.Index(i) - bv := b.Index(i) - if av.Interface() == bv.Interface() { - continue - } - return valueSortLess(av, bv) - } - } - return a.String() < b.String() -} - -// Less returns whether the value at index i should sort before the -// value at index j. It is part of the sort.Interface implementation. -func (s *valuesSorter) Less(i, j int) bool { - if s.strings == nil { - return valueSortLess(s.values[i], s.values[j]) - } - return s.strings[i] < s.strings[j] -} - -// sortValues is a sort function that handles both native types and any type that -// can be converted to error or Stringer. Other inputs are sorted according to -// their Value.String() value to ensure display stability. -func sortValues(values []reflect.Value, cs *ConfigState) { - if len(values) == 0 { - return - } - sort.Sort(newValuesSorter(values, cs)) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go deleted file mode 100644 index 2e3d22f3120..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/config.go +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "os" -) - -// ConfigState houses the configuration options used by spew to format and -// display values. There is a global instance, Config, that is used to control -// all top-level Formatter and Dump functionality. Each ConfigState instance -// provides methods equivalent to the top-level functions. -// -// The zero value for ConfigState provides no indentation. You would typically -// want to set it to a space or a tab. -// -// Alternatively, you can use NewDefaultConfig to get a ConfigState instance -// with default settings. See the documentation of NewDefaultConfig for default -// values. -type ConfigState struct { - // Indent specifies the string to use for each indentation level. The - // global config instance that all top-level functions use set this to a - // single space by default. If you would like more indentation, you might - // set this to a tab with "\t" or perhaps two spaces with " ". - Indent string - - // MaxDepth controls the maximum number of levels to descend into nested - // data structures. The default, 0, means there is no limit. - // - // NOTE: Circular data structures are properly detected, so it is not - // necessary to set this value unless you specifically want to limit deeply - // nested data structures. - MaxDepth int - - // DisableMethods specifies whether or not error and Stringer interfaces are - // invoked for types that implement them. - DisableMethods bool - - // DisablePointerMethods specifies whether or not to check for and invoke - // error and Stringer interfaces on types which only accept a pointer - // receiver when the current type is not a pointer. - // - // NOTE: This might be an unsafe action since calling one of these methods - // with a pointer receiver could technically mutate the value, however, - // in practice, types which choose to satisify an error or Stringer - // interface with a pointer receiver should not be mutating their state - // inside these interface methods. As a result, this option relies on - // access to the unsafe package, so it will not have any effect when - // running in environments without access to the unsafe package such as - // Google App Engine or with the "safe" build tag specified. - DisablePointerMethods bool - - // DisablePointerAddresses specifies whether to disable the printing of - // pointer addresses. This is useful when diffing data structures in tests. - DisablePointerAddresses bool - - // DisableCapacities specifies whether to disable the printing of capacities - // for arrays, slices, maps and channels. This is useful when diffing - // data structures in tests. - DisableCapacities bool - - // ContinueOnMethod specifies whether or not recursion should continue once - // a custom error or Stringer interface is invoked. The default, false, - // means it will print the results of invoking the custom error or Stringer - // interface and return immediately instead of continuing to recurse into - // the internals of the data type. - // - // NOTE: This flag does not have any effect if method invocation is disabled - // via the DisableMethods or DisablePointerMethods options. 
- ContinueOnMethod bool - - // SortKeys specifies map keys should be sorted before being printed. Use - // this to have a more deterministic, diffable output. Note that only - // native types (bool, int, uint, floats, uintptr and string) and types - // that support the error or Stringer interfaces (if methods are - // enabled) are supported, with other types sorted according to the - // reflect.Value.String() output which guarantees display stability. - SortKeys bool - - // SpewKeys specifies that, as a last resort attempt, map keys should - // be spewed to strings and sorted by those strings. This is only - // considered if SortKeys is true. - SpewKeys bool -} - -// Config is the active configuration of the top-level functions. -// The configuration can be changed by modifying the contents of spew.Config. -var Config = ConfigState{Indent: " "} - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the formatted string as a value that satisfies error. See NewFormatter -// for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, c.convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, c.convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, c.convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a Formatter interface returned by c.NewFormatter. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, c.convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Print(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Print(a ...interface{}) (n int, err error) { - return fmt.Print(c.convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. 
It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, c.convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Println(a ...interface{}) (n int, err error) { - return fmt.Println(c.convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprint(a ...interface{}) string { - return fmt.Sprint(c.convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, c.convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a Formatter interface returned by c.NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintln(a ...interface{}) string { - return fmt.Sprintln(c.convertArgs(a)...) -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -c.Printf, c.Println, or c.Printf. -*/ -func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(c, v) -} - -// Fdump formats and displays the passed arguments to io.Writer w. It formats -// exactly the same as Dump. -func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) { - fdump(c, w, a...) 
-} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by modifying the public members -of c. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func (c *ConfigState) Dump(a ...interface{}) { - fdump(c, os.Stdout, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func (c *ConfigState) Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(c, &buf, a...) - return buf.String() -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a spew Formatter interface using -// the ConfigState associated with s. -func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = newFormatter(c, arg) - } - return formatters -} - -// NewDefaultConfig returns a ConfigState with the following default settings. -// -// Indent: " " -// MaxDepth: 0 -// DisableMethods: false -// DisablePointerMethods: false -// ContinueOnMethod: false -// SortKeys: false -func NewDefaultConfig() *ConfigState { - return &ConfigState{Indent: " "} -} diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go deleted file mode 100644 index aacaac6f1e1..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/doc.go +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -/* -Package spew implements a deep pretty printer for Go data structures to aid in -debugging. 
- -A quick overview of the additional features spew provides over the built-in -printing facilities for Go data types are as follows: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output (only when using - Dump style) - -There are two different approaches spew allows for dumping Go data structures: - - * Dump style which prints with newlines, customizable indentation, - and additional debug information such as types and all pointer addresses - used to indirect to the final value - * A custom Formatter interface that integrates cleanly with the standard fmt - package and replaces %v, %+v, %#v, and %#+v to provide inline printing - similar to the default %v while providing the additional functionality - outlined above and passing unsupported format verbs such as %x and %q - along to fmt - -Quick Start - -This section demonstrates how to quickly get started with spew. See the -sections below for further details on formatting and configuration options. - -To dump a variable with full newlines, indentation, type, and pointer -information use Dump, Fdump, or Sdump: - spew.Dump(myVar1, myVar2, ...) - spew.Fdump(someWriter, myVar1, myVar2, ...) - str := spew.Sdump(myVar1, myVar2, ...) - -Alternatively, if you would prefer to use format strings with a compacted inline -printing style, use the convenience wrappers Printf, Fprintf, etc with -%v (most compact), %+v (adds pointer addresses), %#v (adds types), or -%#+v (adds types and pointer addresses): - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -Configuration Options - -Configuration of spew is handled by fields in the ConfigState type. For -convenience, all of the top-level functions use a global state available -via the spew.Config global. - -It is also possible to create a ConfigState instance that provides methods -equivalent to the top-level functions. This allows concurrent configuration -options. See the ConfigState documentation for more details. - -The following configuration options are available: - * Indent - String to use for each indentation level for Dump functions. - It is a single space by default. A popular alternative is "\t". - - * MaxDepth - Maximum number of levels to descend into nested data structures. - There is no limit by default. - - * DisableMethods - Disables invocation of error and Stringer interface methods. - Method invocation is enabled by default. - - * DisablePointerMethods - Disables invocation of error and Stringer interface methods on types - which only accept pointer receivers from non-pointer variables. - Pointer method invocation is enabled by default. - - * DisablePointerAddresses - DisablePointerAddresses specifies whether to disable the printing of - pointer addresses. This is useful when diffing data structures in tests. - - * DisableCapacities - DisableCapacities specifies whether to disable the printing of - capacities for arrays, slices, maps and channels. 
This is useful when - diffing data structures in tests. - - * ContinueOnMethod - Enables recursion into types after invoking error and Stringer interface - methods. Recursion after method invocation is disabled by default. - - * SortKeys - Specifies map keys should be sorted before being printed. Use - this to have a more deterministic, diffable output. Note that - only native types (bool, int, uint, floats, uintptr and string) - and types which implement error or Stringer interfaces are - supported with other types sorted according to the - reflect.Value.String() output which guarantees display - stability. Natural map order is used by default. - - * SpewKeys - Specifies that, as a last resort attempt, map keys should be - spewed to strings and sorted by those strings. This is only - considered if SortKeys is true. - -Dump Usage - -Simply call spew.Dump with a list of variables you want to dump: - - spew.Dump(myVar1, myVar2, ...) - -You may also call spew.Fdump if you would prefer to output to an arbitrary -io.Writer. For example, to dump to standard error: - - spew.Fdump(os.Stderr, myVar1, myVar2, ...) - -A third option is to call spew.Sdump to get the formatted output as a string: - - str := spew.Sdump(myVar1, myVar2, ...) - -Sample Dump Output - -See the Dump example for details on the setup of the types and variables being -shown here. - - (main.Foo) { - unexportedField: (*main.Bar)(0xf84002e210)({ - flag: (main.Flag) flagTwo, - data: (uintptr) - }), - ExportedField: (map[interface {}]interface {}) (len=1) { - (string) (len=3) "one": (bool) true - } - } - -Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C -command as shown. - ([]uint8) (len=32 cap=32) { - 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... | - 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0| - 00000020 31 32 |12| - } - -Custom Formatter - -Spew provides a custom formatter that implements the fmt.Formatter interface -so that it integrates cleanly with standard fmt package printing functions. The -formatter is useful for inline printing of smaller data types similar to the -standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Custom Formatter Usage - -The simplest way to make use of the spew custom formatter is to call one of the -convenience functions such as spew.Printf, spew.Println, or spew.Printf. The -functions have syntax you are most likely already familiar with: - - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Println(myVar, myVar2) - spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -See the Index for the full list convenience functions. 
- -Sample Formatter Output - -Double pointer to a uint8: - %v: <**>5 - %+v: <**>(0xf8400420d0->0xf8400420c8)5 - %#v: (**uint8)5 - %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5 - -Pointer to circular struct with a uint8 field and a pointer to itself: - %v: <*>{1 <*>} - %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)} - %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)} - %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)} - -See the Printf example for details on the setup of variables being shown -here. - -Errors - -Since it is possible for custom Stringer/error interfaces to panic, spew -detects them and handles them internally by printing the panic information -inline with the output. Since spew is intended to provide deep pretty printing -capabilities on structures, it intentionally does not return any errors. -*/ -package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go deleted file mode 100644 index f78d89fc1f6..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/dump.go +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "encoding/hex" - "fmt" - "io" - "os" - "reflect" - "regexp" - "strconv" - "strings" -) - -var ( - // uint8Type is a reflect.Type representing a uint8. It is used to - // convert cgo types to uint8 slices for hexdumping. - uint8Type = reflect.TypeOf(uint8(0)) - - // cCharRE is a regular expression that matches a cgo char. - // It is used to detect character arrays to hexdump them. - cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`) - - // cUnsignedCharRE is a regular expression that matches a cgo unsigned - // char. It is used to detect unsigned character arrays to hexdump - // them. - cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`) - - // cUint8tCharRE is a regular expression that matches a cgo uint8_t. - // It is used to detect uint8_t arrays to hexdump them. - cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`) -) - -// dumpState contains information about the state of a dump operation. -type dumpState struct { - w io.Writer - depth int - pointers map[uintptr]int - ignoreNextType bool - ignoreNextIndent bool - cs *ConfigState -} - -// indent performs indentation according to the depth level and cs.Indent -// option. -func (d *dumpState) indent() { - if d.ignoreNextIndent { - d.ignoreNextIndent = false - return - } - d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth)) -} - -// unpackValue returns values inside of non-nil interfaces when possible. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. 
-func (d *dumpState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface && !v.IsNil() { - v = v.Elem() - } - return v -} - -// dumpPtr handles formatting of pointers by indirecting them as necessary. -func (d *dumpState) dumpPtr(v reflect.Value) { - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range d.pointers { - if depth >= d.depth { - delete(d.pointers, k) - } - } - - // Keep list of all dereferenced pointers to show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by dereferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. - nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := d.pointers[addr]; ok && pd < d.depth { - cycleFound = true - indirects-- - break - } - d.pointers[addr] = d.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type information. - d.w.Write(openParenBytes) - d.w.Write(bytes.Repeat(asteriskBytes, indirects)) - d.w.Write([]byte(ve.Type().String())) - d.w.Write(closeParenBytes) - - // Display pointer information. - if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 { - d.w.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - d.w.Write(pointerChainBytes) - } - printHexPtr(d.w, addr) - } - d.w.Write(closeParenBytes) - } - - // Display dereferenced value. - d.w.Write(openParenBytes) - switch { - case nilFound: - d.w.Write(nilAngleBytes) - - case cycleFound: - d.w.Write(circularBytes) - - default: - d.ignoreNextType = true - d.dump(ve) - } - d.w.Write(closeParenBytes) -} - -// dumpSlice handles formatting of arrays and slices. Byte (uint8 under -// reflection) arrays and slices are dumped in hexdump -C fashion. -func (d *dumpState) dumpSlice(v reflect.Value) { - // Determine whether this type should be hex dumped or not. Also, - // for types which should be hexdumped, try to use the underlying data - // first, then fall back to trying to convert them to a uint8 slice. - var buf []uint8 - doConvert := false - doHexDump := false - numEntries := v.Len() - if numEntries > 0 { - vt := v.Index(0).Type() - vts := vt.String() - switch { - // C types that need to be converted. - case cCharRE.MatchString(vts): - fallthrough - case cUnsignedCharRE.MatchString(vts): - fallthrough - case cUint8tCharRE.MatchString(vts): - doConvert = true - - // Try to use existing uint8 slices and fall back to converting - // and copying if that fails. - case vt.Kind() == reflect.Uint8: - // We need an addressable interface to convert the type - // to a byte slice. However, the reflect package won't - // give us an interface on certain things like - // unexported struct fields in order to enforce - // visibility rules. We use unsafe, when available, to - // bypass these restrictions since this package does not - // mutate the values. - vs := v - if !vs.CanInterface() || !vs.CanAddr() { - vs = unsafeReflectValue(vs) - } - if !UnsafeDisabled { - vs = vs.Slice(0, numEntries) - - // Use the existing uint8 slice if it can be - // type asserted. 
- iface := vs.Interface() - if slice, ok := iface.([]uint8); ok { - buf = slice - doHexDump = true - break - } - } - - // The underlying data needs to be converted if it can't - // be type asserted to a uint8 slice. - doConvert = true - } - - // Copy and convert the underlying type if needed. - if doConvert && vt.ConvertibleTo(uint8Type) { - // Convert and copy each element into a uint8 byte - // slice. - buf = make([]uint8, numEntries) - for i := 0; i < numEntries; i++ { - vv := v.Index(i) - buf[i] = uint8(vv.Convert(uint8Type).Uint()) - } - doHexDump = true - } - } - - // Hexdump the entire slice as needed. - if doHexDump { - indent := strings.Repeat(d.cs.Indent, d.depth) - str := indent + hex.Dump(buf) - str = strings.Replace(str, "\n", "\n"+indent, -1) - str = strings.TrimRight(str, d.cs.Indent) - d.w.Write([]byte(str)) - return - } - - // Recursively call dump for each item. - for i := 0; i < numEntries; i++ { - d.dump(d.unpackValue(v.Index(i))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } -} - -// dump is the main workhorse for dumping a value. It uses the passed reflect -// value to figure out what kind of object we are dealing with and formats it -// appropriately. It is a recursive function, however circular data structures -// are detected and handled properly. -func (d *dumpState) dump(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - d.w.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - d.indent() - d.dumpPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !d.ignoreNextType { - d.indent() - d.w.Write(openParenBytes) - d.w.Write([]byte(v.Type().String())) - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - d.ignoreNextType = false - - // Display length and capacity if the built-in len and cap functions - // work with the value's kind and the len/cap itself is non-zero. - valueLen, valueCap := 0, 0 - switch v.Kind() { - case reflect.Array, reflect.Slice, reflect.Chan: - valueLen, valueCap = v.Len(), v.Cap() - case reflect.Map, reflect.String: - valueLen = v.Len() - } - if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 { - d.w.Write(openParenBytes) - if valueLen != 0 { - d.w.Write(lenEqualsBytes) - printInt(d.w, int64(valueLen), 10) - } - if !d.cs.DisableCapacities && valueCap != 0 { - if valueLen != 0 { - d.w.Write(spaceBytes) - } - d.w.Write(capEqualsBytes) - printInt(d.w, int64(valueCap), 10) - } - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - - // Call Stringer/error interfaces if they exist and the handle methods flag - // is enabled - if !d.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(d.cs, d.w, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. 
- - case reflect.Bool: - printBool(d.w, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(d.w, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(d.w, v.Uint(), 10) - - case reflect.Float32: - printFloat(d.w, v.Float(), 32) - - case reflect.Float64: - printFloat(d.w, v.Float(), 64) - - case reflect.Complex64: - printComplex(d.w, v.Complex(), 32) - - case reflect.Complex128: - printComplex(d.w, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - d.dumpSlice(v) - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.String: - d.w.Write([]byte(strconv.Quote(v.String()))) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - d.w.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. - - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - numEntries := v.Len() - keys := v.MapKeys() - if d.cs.SortKeys { - sortValues(keys, d.cs) - } - for i, key := range keys { - d.dump(d.unpackValue(key)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.MapIndex(key))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Struct: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - vt := v.Type() - numFields := v.NumField() - for i := 0; i < numFields; i++ { - d.indent() - vtf := vt.Field(i) - d.w.Write([]byte(vtf.Name)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.Field(i))) - if i < (numFields - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(d.w, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(d.w, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it in case any new - // types are added. - default: - if v.CanInterface() { - fmt.Fprintf(d.w, "%v", v.Interface()) - } else { - fmt.Fprintf(d.w, "%v", v.String()) - } - } -} - -// fdump is a helper function to consolidate the logic from the various public -// methods which take varying writers and config states. -func fdump(cs *ConfigState, w io.Writer, a ...interface{}) { - for _, arg := range a { - if arg == nil { - w.Write(interfaceBytes) - w.Write(spaceBytes) - w.Write(nilAngleBytes) - w.Write(newlineBytes) - continue - } - - d := dumpState{w: w, cs: cs} - d.pointers = make(map[uintptr]int) - d.dump(reflect.ValueOf(arg)) - d.w.Write(newlineBytes) - } -} - -// Fdump formats and displays the passed arguments to io.Writer w. 
It formats -// exactly the same as Dump. -func Fdump(w io.Writer, a ...interface{}) { - fdump(&Config, w, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(&Config, &buf, a...) - return buf.String() -} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by an exported package global, -spew.Config. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func Dump(a ...interface{}) { - fdump(&Config, os.Stdout, a...) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go deleted file mode 100644 index b04edb7d7ac..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/format.go +++ /dev/null @@ -1,419 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "reflect" - "strconv" - "strings" -) - -// supportedFlags is a list of all the character flags supported by fmt package. -const supportedFlags = "0-+# " - -// formatState implements the fmt.Formatter interface and contains information -// about the state of a formatting operation. The NewFormatter function can -// be used to get a new Formatter which can be used directly as arguments -// in standard fmt package printing calls. -type formatState struct { - value interface{} - fs fmt.State - depth int - pointers map[uintptr]int - ignoreNextType bool - cs *ConfigState -} - -// buildDefaultFormat recreates the original format string without precision -// and width information to pass in to fmt.Sprintf in the case of an -// unrecognized type. Unless new types are added to the language, this -// function won't ever be called. 
-func (f *formatState) buildDefaultFormat() (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - buf.WriteRune('v') - - format = buf.String() - return format -} - -// constructOrigFormat recreates the original format string including precision -// and width information to pass along to the standard fmt package. This allows -// automatic deferral of all format strings this package doesn't support. -func (f *formatState) constructOrigFormat(verb rune) (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - if width, ok := f.fs.Width(); ok { - buf.WriteString(strconv.Itoa(width)) - } - - if precision, ok := f.fs.Precision(); ok { - buf.Write(precisionBytes) - buf.WriteString(strconv.Itoa(precision)) - } - - buf.WriteRune(verb) - - format = buf.String() - return format -} - -// unpackValue returns values inside of non-nil interfaces when possible and -// ensures that types for values which have been unpacked from an interface -// are displayed when the show types flag is also set. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. -func (f *formatState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface { - f.ignoreNextType = false - if !v.IsNil() { - v = v.Elem() - } - } - return v -} - -// formatPtr handles formatting of pointers by indirecting them as necessary. -func (f *formatState) formatPtr(v reflect.Value) { - // Display nil if top level pointer is nil. - showTypes := f.fs.Flag('#') - if v.IsNil() && (!showTypes || f.ignoreNextType) { - f.fs.Write(nilAngleBytes) - return - } - - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range f.pointers { - if depth >= f.depth { - delete(f.pointers, k) - } - } - - // Keep list of all dereferenced pointers to possibly show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by derferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. - nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := f.pointers[addr]; ok && pd < f.depth { - cycleFound = true - indirects-- - break - } - f.pointers[addr] = f.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type or indirection level depending on flags. - if showTypes && !f.ignoreNextType { - f.fs.Write(openParenBytes) - f.fs.Write(bytes.Repeat(asteriskBytes, indirects)) - f.fs.Write([]byte(ve.Type().String())) - f.fs.Write(closeParenBytes) - } else { - if nilFound || cycleFound { - indirects += strings.Count(ve.Type().String(), "*") - } - f.fs.Write(openAngleBytes) - f.fs.Write([]byte(strings.Repeat("*", indirects))) - f.fs.Write(closeAngleBytes) - } - - // Display pointer information depending on flags. 
- if f.fs.Flag('+') && (len(pointerChain) > 0) { - f.fs.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - f.fs.Write(pointerChainBytes) - } - printHexPtr(f.fs, addr) - } - f.fs.Write(closeParenBytes) - } - - // Display dereferenced value. - switch { - case nilFound: - f.fs.Write(nilAngleBytes) - - case cycleFound: - f.fs.Write(circularShortBytes) - - default: - f.ignoreNextType = true - f.format(ve) - } -} - -// format is the main workhorse for providing the Formatter interface. It -// uses the passed reflect value to figure out what kind of object we are -// dealing with and formats it appropriately. It is a recursive function, -// however circular data structures are detected and handled properly. -func (f *formatState) format(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - f.fs.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - f.formatPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !f.ignoreNextType && f.fs.Flag('#') { - f.fs.Write(openParenBytes) - f.fs.Write([]byte(v.Type().String())) - f.fs.Write(closeParenBytes) - } - f.ignoreNextType = false - - // Call Stringer/error interfaces if they exist and the handle methods - // flag is enabled. - if !f.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(f.cs, f.fs, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. - - case reflect.Bool: - printBool(f.fs, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(f.fs, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(f.fs, v.Uint(), 10) - - case reflect.Float32: - printFloat(f.fs, v.Float(), 32) - - case reflect.Float64: - printFloat(f.fs, v.Float(), 64) - - case reflect.Complex64: - printComplex(f.fs, v.Complex(), 32) - - case reflect.Complex128: - printComplex(f.fs, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - f.fs.Write(openBracketBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - numEntries := v.Len() - for i := 0; i < numEntries; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(v.Index(i))) - } - } - f.depth-- - f.fs.Write(closeBracketBytes) - - case reflect.String: - f.fs.Write([]byte(v.String())) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - f.fs.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. 
- - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - - f.fs.Write(openMapBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - keys := v.MapKeys() - if f.cs.SortKeys { - sortValues(keys, f.cs) - } - for i, key := range keys { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(key)) - f.fs.Write(colonBytes) - f.ignoreNextType = true - f.format(f.unpackValue(v.MapIndex(key))) - } - } - f.depth-- - f.fs.Write(closeMapBytes) - - case reflect.Struct: - numFields := v.NumField() - f.fs.Write(openBraceBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - vt := v.Type() - for i := 0; i < numFields; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - vtf := vt.Field(i) - if f.fs.Flag('+') || f.fs.Flag('#') { - f.fs.Write([]byte(vtf.Name)) - f.fs.Write(colonBytes) - } - f.format(f.unpackValue(v.Field(i))) - } - } - f.depth-- - f.fs.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(f.fs, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(f.fs, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it if any get added. - default: - format := f.buildDefaultFormat() - if v.CanInterface() { - fmt.Fprintf(f.fs, format, v.Interface()) - } else { - fmt.Fprintf(f.fs, format, v.String()) - } - } -} - -// Format satisfies the fmt.Formatter interface. See NewFormatter for usage -// details. -func (f *formatState) Format(fs fmt.State, verb rune) { - f.fs = fs - - // Use standard formatting for verbs that are not v. - if verb != 'v' { - format := f.constructOrigFormat(verb) - fmt.Fprintf(fs, format, f.value) - return - } - - if f.value == nil { - if fs.Flag('#') { - fs.Write(interfaceBytes) - } - fs.Write(nilAngleBytes) - return - } - - f.format(reflect.ValueOf(f.value)) -} - -// newFormatter is a helper function to consolidate the logic from the various -// public methods which take varying config states. -func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter { - fs := &formatState{value: v, cs: cs} - fs.pointers = make(map[uintptr]int) - return fs -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -Printf, Println, or Fprintf. 
-*/ -func NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(&Config, v) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/spew.go b/vendor/github.com/davecgh/go-spew/spew/spew.go deleted file mode 100644 index 32c0e338825..00000000000 --- a/vendor/github.com/davecgh/go-spew/spew/spew.go +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "fmt" - "io" -) - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the formatted string as a value that satisfies error. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a default Formatter interface returned by NewFormatter. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. 
-// -// This function is shorthand for the following syntax: -// -// fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b)) -func Print(a ...interface{}) (n int, err error) { - return fmt.Print(convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b)) -func Println(a ...interface{}) (n int, err error) { - return fmt.Println(convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprint(a ...interface{}) string { - return fmt.Sprint(convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintln(a ...interface{}) string { - return fmt.Sprintln(convertArgs(a)...) -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a default spew Formatter interface. -func convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = NewFormatter(arg) - } - return formatters -} diff --git a/vendor/github.com/dgrijalva/jwt-go/.gitignore b/vendor/github.com/dgrijalva/jwt-go/.gitignore deleted file mode 100644 index 80bed650ec0..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.DS_Store -bin - - diff --git a/vendor/github.com/dgrijalva/jwt-go/.travis.yml b/vendor/github.com/dgrijalva/jwt-go/.travis.yml deleted file mode 100644 index 1027f56cd94..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -language: go - -script: - - go vet ./... - - go test -v ./... 
- -go: - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - tip diff --git a/vendor/github.com/dgrijalva/jwt-go/LICENSE b/vendor/github.com/dgrijalva/jwt-go/LICENSE deleted file mode 100644 index df83a9c2f01..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -Copyright (c) 2012 Dave Grijalva - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md b/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md deleted file mode 100644 index 7fc1f793cbc..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md +++ /dev/null @@ -1,97 +0,0 @@ -## Migration Guide from v2 -> v3 - -Version 3 adds several new, frequently requested features. To do so, it introduces a few breaking changes. We've worked to keep these as minimal as possible. This guide explains the breaking changes and how you can quickly update your code. - -### `Token.Claims` is now an interface type - -The most requested feature from the 2.0 verison of this library was the ability to provide a custom type to the JSON parser for claims. This was implemented by introducing a new interface, `Claims`, to replace `map[string]interface{}`. We also included two concrete implementations of `Claims`: `MapClaims` and `StandardClaims`. - -`MapClaims` is an alias for `map[string]interface{}` with built in validation behavior. It is the default claims type when using `Parse`. The usage is unchanged except you must type cast the claims property. - -The old example for parsing a token looked like this.. - -```go - if token, err := jwt.Parse(tokenString, keyLookupFunc); err == nil { - fmt.Printf("Token for user %v expires %v", token.Claims["user"], token.Claims["exp"]) - } -``` - -is now directly mapped to... - -```go - if token, err := jwt.Parse(tokenString, keyLookupFunc); err == nil { - claims := token.Claims.(jwt.MapClaims) - fmt.Printf("Token for user %v expires %v", claims["user"], claims["exp"]) - } -``` - -`StandardClaims` is designed to be embedded in your custom type. You can supply a custom claims type with the new `ParseWithClaims` function. Here's an example of using a custom claims type. 
- -```go - type MyCustomClaims struct { - User string - *StandardClaims - } - - if token, err := jwt.ParseWithClaims(tokenString, &MyCustomClaims{}, keyLookupFunc); err == nil { - claims := token.Claims.(*MyCustomClaims) - fmt.Printf("Token for user %v expires %v", claims.User, claims.StandardClaims.ExpiresAt) - } -``` - -### `ParseFromRequest` has been moved - -To keep this library focused on the tokens without becoming overburdened with complex request processing logic, `ParseFromRequest` and its new companion `ParseFromRequestWithClaims` have been moved to a subpackage, `request`. The method signatues have also been augmented to receive a new argument: `Extractor`. - -`Extractors` do the work of picking the token string out of a request. The interface is simple and composable. - -This simple parsing example: - -```go - if token, err := jwt.ParseFromRequest(tokenString, req, keyLookupFunc); err == nil { - fmt.Printf("Token for user %v expires %v", token.Claims["user"], token.Claims["exp"]) - } -``` - -is directly mapped to: - -```go - if token, err := request.ParseFromRequest(req, request.OAuth2Extractor, keyLookupFunc); err == nil { - claims := token.Claims.(jwt.MapClaims) - fmt.Printf("Token for user %v expires %v", claims["user"], claims["exp"]) - } -``` - -There are several concrete `Extractor` types provided for your convenience: - -* `HeaderExtractor` will search a list of headers until one contains content. -* `ArgumentExtractor` will search a list of keys in request query and form arguments until one contains content. -* `MultiExtractor` will try a list of `Extractors` in order until one returns content. -* `AuthorizationHeaderExtractor` will look in the `Authorization` header for a `Bearer` token. -* `OAuth2Extractor` searches the places an OAuth2 token would be specified (per the spec): `Authorization` header and `access_token` argument -* `PostExtractionFilter` wraps an `Extractor`, allowing you to process the content before it's parsed. A simple example is stripping the `Bearer ` text from a header - - -### RSA signing methods no longer accept `[]byte` keys - -Due to a [critical vulnerability](https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/), we've decided the convenience of accepting `[]byte` instead of `rsa.PublicKey` or `rsa.PrivateKey` isn't worth the risk of misuse. - -To replace this behavior, we've added two helper methods: `ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error)` and `ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error)`. These are just simple helpers for unpacking PEM encoded PKCS1 and PKCS8 keys. If your keys are encoded any other way, all you need to do is convert them to the `crypto/rsa` package's types. 
- -```go - func keyLookupFunc(*Token) (interface{}, error) { - // Don't forget to validate the alg is what you expect: - if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { - return nil, fmt.Errorf("Unexpected signing method: %v", token.Header["alg"]) - } - - // Look up key - key, err := lookupPublicKey(token.Header["kid"]) - if err != nil { - return nil, err - } - - // Unpack key from PEM encoded PKCS8 - return jwt.ParseRSAPublicKeyFromPEM(key) - } -``` diff --git a/vendor/github.com/dgrijalva/jwt-go/README.md b/vendor/github.com/dgrijalva/jwt-go/README.md deleted file mode 100644 index d358d881b8d..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# jwt-go - -[![Build Status](https://travis-ci.org/dgrijalva/jwt-go.svg?branch=master)](https://travis-ci.org/dgrijalva/jwt-go) -[![GoDoc](https://godoc.org/github.com/dgrijalva/jwt-go?status.svg)](https://godoc.org/github.com/dgrijalva/jwt-go) - -A [go](http://www.golang.org) (or 'golang' for search engine friendliness) implementation of [JSON Web Tokens](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) - -**NEW VERSION COMING:** There have been a lot of improvements suggested since the version 3.0.0 released in 2016. I'm working now on cutting two different releases: 3.2.0 will contain any non-breaking changes or enhancements. 4.0.0 will follow shortly which will include breaking changes. See the 4.0.0 milestone to get an idea of what's coming. If you have other ideas, or would like to participate in 4.0.0, now's the time. If you depend on this library and don't want to be interrupted, I recommend you use your dependency mangement tool to pin to version 3. - -**SECURITY NOTICE:** Some older versions of Go have a security issue in the cryotp/elliptic. Recommendation is to upgrade to at least 1.8.3. See issue #216 for more detail. - -**SECURITY NOTICE:** It's important that you [validate the `alg` presented is what you expect](https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/). This library attempts to make it easy to do the right thing by requiring key types match the expected alg, but you should take the extra step to verify it in your usage. See the examples provided. - -## What the heck is a JWT? - -JWT.io has [a great introduction](https://jwt.io/introduction) to JSON Web Tokens. - -In short, it's a signed JSON object that does something useful (for example, authentication). It's commonly used for `Bearer` tokens in Oauth 2. A token is made of three parts, separated by `.`'s. The first two parts are JSON objects, that have been [base64url](http://tools.ietf.org/html/rfc4648) encoded. The last part is the signature, encoded the same way. - -The first part is called the header. It contains the necessary information for verifying the last part, the signature. For example, which encryption method was used for signing and what key was used. - -The part in the middle is the interesting bit. It's called the Claims and contains the actual stuff you care about. Refer to [the RFC](http://self-issued.info/docs/draft-jones-json-web-token.html) for information about reserved keys and the proper way to add your own. - -## What's in the box? - -This library supports the parsing and verification as well as the generation and signing of JWTs. Current supported signing algorithms are HMAC SHA, RSA, RSA-PSS, and ECDSA, though hooks are present for adding your own. 
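The README section above lists HMAC SHA, RSA, RSA-PSS, and ECDSA as the supported signing families. For reference, a minimal, self-contained sketch of the v3 API it describes, issuing and then verifying an HS256 token; the secret and claim values below are illustrative only and are not taken from this repository.

```go
package main

import (
	"fmt"
	"time"

	jwt "github.com/dgrijalva/jwt-go"
)

func main() {
	// Illustrative secret; any []byte works for the HMAC family.
	secret := []byte("example-secret")

	// Build and sign a token with HS256 and map-based claims.
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"user": "alice",
		"exp":  time.Now().Add(time.Hour).Unix(),
	})
	signed, err := token.SignedString(secret)
	if err != nil {
		panic(err)
	}

	// Parse it back, checking the alg family before returning the key.
	parsed, err := jwt.Parse(signed, func(t *jwt.Token) (interface{}, error) {
		if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
		}
		return secret, nil
	})
	if err != nil {
		panic(err)
	}
	claims := parsed.Claims.(jwt.MapClaims)
	fmt.Println("user:", claims["user"], "valid:", parsed.Valid)
}
```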
- -## Examples - -See [the project documentation](https://godoc.org/github.com/dgrijalva/jwt-go) for examples of usage: - -* [Simple example of parsing and validating a token](https://godoc.org/github.com/dgrijalva/jwt-go#example-Parse--Hmac) -* [Simple example of building and signing a token](https://godoc.org/github.com/dgrijalva/jwt-go#example-New--Hmac) -* [Directory of Examples](https://godoc.org/github.com/dgrijalva/jwt-go#pkg-examples) - -## Extensions - -This library publishes all the necessary components for adding your own signing methods. Simply implement the `SigningMethod` interface and register a factory method using `RegisterSigningMethod`. - -Here's an example of an extension that integrates with the Google App Engine signing tools: https://github.com/someone1/gcp-jwt-go - -## Compliance - -This library was last reviewed to comply with [RTF 7519](http://www.rfc-editor.org/info/rfc7519) dated May 2015 with a few notable differences: - -* In order to protect against accidental use of [Unsecured JWTs](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#UnsecuredJWT), tokens using `alg=none` will only be accepted if the constant `jwt.UnsafeAllowNoneSignatureType` is provided as the key. - -## Project Status & Versioning - -This library is considered production ready. Feedback and feature requests are appreciated. The API should be considered stable. There should be very few backwards-incompatible changes outside of major version updates (and only with good reason). - -This project uses [Semantic Versioning 2.0.0](http://semver.org). Accepted pull requests will land on `master`. Periodically, versions will be tagged from `master`. You can find all the releases on [the project releases page](https://github.com/dgrijalva/jwt-go/releases). - -While we try to make it obvious when we make breaking changes, there isn't a great mechanism for pushing announcements out to users. You may want to use this alternative package include: `gopkg.in/dgrijalva/jwt-go.v3`. It will do the right thing WRT semantic versioning. - -**BREAKING CHANGES:*** -* Version 3.0.0 includes _a lot_ of changes from the 2.x line, including a few that break the API. We've tried to break as few things as possible, so there should just be a few type signature changes. A full list of breaking changes is available in `VERSION_HISTORY.md`. See `MIGRATION_GUIDE.md` for more information on updating your code. - -## Usage Tips - -### Signing vs Encryption - -A token is simply a JSON object that is signed by its author. this tells you exactly two things about the data: - -* The author of the token was in the possession of the signing secret -* The data has not been modified since it was signed - -It's important to know that JWT does not provide encryption, which means anyone who has access to the token can read its contents. If you need to protect (encrypt) the data, there is a companion spec, `JWE`, that provides this functionality. JWE is currently outside the scope of this library. - -### Choosing a Signing Method - -There are several signing methods available, and you should probably take the time to learn about the various options before choosing one. The principal design decision is most likely going to be symmetric vs asymmetric. - -Symmetric signing methods, such as HSA, use only a single secret. This is probably the simplest signing method to use since any `[]byte` can be used as a valid secret. They are also slightly computationally faster to use, though this rarely is enough to matter. 
Symmetric signing methods work the best when both producers and consumers of tokens are trusted, or even the same system. Since the same secret is used to both sign and validate tokens, you can't easily distribute the key for validation. - -Asymmetric signing methods, such as RSA, use different keys for signing and verifying tokens. This makes it possible to produce tokens with a private key, and allow any consumer to access the public key for verification. - -### Signing Methods and Key Types - -Each signing method expects a different object type for its signing keys. See the package documentation for details. Here are the most common ones: - -* The [HMAC signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodHMAC) (`HS256`,`HS384`,`HS512`) expect `[]byte` values for signing and validation -* The [RSA signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodRSA) (`RS256`,`RS384`,`RS512`) expect `*rsa.PrivateKey` for signing and `*rsa.PublicKey` for validation -* The [ECDSA signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodECDSA) (`ES256`,`ES384`,`ES512`) expect `*ecdsa.PrivateKey` for signing and `*ecdsa.PublicKey` for validation - -### JWT and OAuth - -It's worth mentioning that OAuth and JWT are not the same thing. A JWT token is simply a signed JSON object. It can be used anywhere such a thing is useful. There is some confusion, though, as JWT is the most common type of bearer token used in OAuth2 authentication. - -Without going too far down the rabbit hole, here's a description of the interaction of these technologies: - -* OAuth is a protocol for allowing an identity provider to be separate from the service a user is logging in to. For example, whenever you use Facebook to log into a different service (Yelp, Spotify, etc), you are using OAuth. -* OAuth defines several options for passing around authentication data. One popular method is called a "bearer token". A bearer token is simply a string that _should_ only be held by an authenticated user. Thus, simply presenting this token proves your identity. You can probably derive from here why a JWT might make a good bearer token. -* Because bearer tokens are used for authentication, it's important they're kept secret. This is why transactions that use bearer tokens typically happen over SSL. - -## More - -Documentation can be found [on godoc.org](http://godoc.org/github.com/dgrijalva/jwt-go). - -The command line utility included in this project (cmd/jwt) provides a straightforward example of token creation and parsing as well as a useful tool for debugging your own integration. You'll also find several implementation examples in the documentation. diff --git a/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md b/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md deleted file mode 100644 index 6370298313a..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md +++ /dev/null @@ -1,118 +0,0 @@ -## `jwt-go` Version History - -#### 3.2.0 - -* Added method `ParseUnverified` to allow users to split up the tasks of parsing and validation -* HMAC signing method returns `ErrInvalidKeyType` instead of `ErrInvalidKey` where appropriate -* Added options to `request.ParseFromRequest`, which allows for an arbitrary list of modifiers to parsing behavior. Initial set include `WithClaims` and `WithParser`. Existing usage of this function will continue to work as before. -* Deprecated `ParseFromRequestWithClaims` to simplify API in the future. 
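The key-type notes above pair each signing family with the Go type it expects, and the PEM helpers replace the old []byte convenience for RSA keys. A minimal sketch of that asymmetric flow, assuming a PEM-encoded RSA key pair on disk (the file paths are placeholders):

```go
package main

import (
	"fmt"
	"io/ioutil"
	"time"

	jwt "github.com/dgrijalva/jwt-go"
)

func main() {
	// Placeholder paths; any PEM-encoded RSA key pair works here.
	privPEM, err := ioutil.ReadFile("private.pem")
	if err != nil {
		panic(err)
	}
	pubPEM, err := ioutil.ReadFile("public.pem")
	if err != nil {
		panic(err)
	}

	privKey, err := jwt.ParseRSAPrivateKeyFromPEM(privPEM)
	if err != nil {
		panic(err)
	}
	pubKey, err := jwt.ParseRSAPublicKeyFromPEM(pubPEM)
	if err != nil {
		panic(err)
	}

	// RS256 signs with *rsa.PrivateKey...
	token := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.StandardClaims{
		Subject:   "alice",
		ExpiresAt: time.Now().Add(time.Hour).Unix(),
	})
	signed, err := token.SignedString(privKey)
	if err != nil {
		panic(err)
	}

	// ...and verifies with *rsa.PublicKey.
	parsed, err := jwt.ParseWithClaims(signed, &jwt.StandardClaims{}, func(t *jwt.Token) (interface{}, error) {
		if _, ok := t.Method.(*jwt.SigningMethodRSA); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
		}
		return pubKey, nil
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(parsed.Claims.(*jwt.StandardClaims).Subject, parsed.Valid)
}
```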
- -#### 3.1.0 - -* Improvements to `jwt` command line tool -* Added `SkipClaimsValidation` option to `Parser` -* Documentation updates - -#### 3.0.0 - -* **Compatibility Breaking Changes**: See MIGRATION_GUIDE.md for tips on updating your code - * Dropped support for `[]byte` keys when using RSA signing methods. This convenience feature could contribute to security vulnerabilities involving mismatched key types with signing methods. - * `ParseFromRequest` has been moved to `request` subpackage and usage has changed - * The `Claims` property on `Token` is now type `Claims` instead of `map[string]interface{}`. The default value is type `MapClaims`, which is an alias to `map[string]interface{}`. This makes it possible to use a custom type when decoding claims. -* Other Additions and Changes - * Added `Claims` interface type to allow users to decode the claims into a custom type - * Added `ParseWithClaims`, which takes a third argument of type `Claims`. Use this function instead of `Parse` if you have a custom type you'd like to decode into. - * Dramatically improved the functionality and flexibility of `ParseFromRequest`, which is now in the `request` subpackage - * Added `ParseFromRequestWithClaims` which is the `FromRequest` equivalent of `ParseWithClaims` - * Added new interface type `Extractor`, which is used for extracting JWT strings from http requests. Used with `ParseFromRequest` and `ParseFromRequestWithClaims`. - * Added several new, more specific, validation errors to error type bitmask - * Moved examples from README to executable example files - * Signing method registry is now thread safe - * Added new property to `ValidationError`, which contains the raw error returned by calls made by parse/verify (such as those returned by keyfunc or json parser) - -#### 2.7.0 - -This will likely be the last backwards compatible release before 3.0.0, excluding essential bug fixes. - -* Added new option `-show` to the `jwt` command that will just output the decoded token without verifying -* Error text for expired tokens includes how long it's been expired -* Fixed incorrect error returned from `ParseRSAPublicKeyFromPEM` -* Documentation updates - -#### 2.6.0 - -* Exposed inner error within ValidationError -* Fixed validation errors when using UseJSONNumber flag -* Added several unit tests - -#### 2.5.0 - -* Added support for signing method none. You shouldn't use this. The API tries to make this clear. -* Updated/fixed some documentation -* Added more helpful error message when trying to parse tokens that begin with `BEARER ` - -#### 2.4.0 - -* Added new type, Parser, to allow for configuration of various parsing parameters - * You can now specify a list of valid signing methods. Anything outside this set will be rejected. - * You can now opt to use the `json.Number` type instead of `float64` when parsing token JSON -* Added support for [Travis CI](https://travis-ci.org/dgrijalva/jwt-go) -* Fixed some bugs with ECDSA parsing - -#### 2.3.0 - -* Added support for ECDSA signing methods -* Added support for RSA PSS signing methods (requires go v1.4) - -#### 2.2.0 - -* Gracefully handle a `nil` `Keyfunc` being passed to `Parse`. Result will now be the parsed token and an error, instead of a panic. - -#### 2.1.0 - -Backwards compatible API change that was missed in 2.0.0. - -* The `SignedString` method on `Token` now takes `interface{}` instead of `[]byte` - -#### 2.0.0 - -There were two major reasons for breaking backwards compatibility with this update. 
The first was a refactor required to expand the width of the RSA and HMAC-SHA signing implementations. There will likely be no required code changes to support this change. - -The second update, while unfortunately requiring a small change in integration, is required to open up this library to other signing methods. Not all keys used for all signing methods have a single standard on-disk representation. Requiring `[]byte` as the type for all keys proved too limiting. Additionally, this implementation allows for pre-parsed tokens to be reused, which might matter in an application that parses a high volume of tokens with a small set of keys. Backwards compatibilty has been maintained for passing `[]byte` to the RSA signing methods, but they will also accept `*rsa.PublicKey` and `*rsa.PrivateKey`. - -It is likely the only integration change required here will be to change `func(t *jwt.Token) ([]byte, error)` to `func(t *jwt.Token) (interface{}, error)` when calling `Parse`. - -* **Compatibility Breaking Changes** - * `SigningMethodHS256` is now `*SigningMethodHMAC` instead of `type struct` - * `SigningMethodRS256` is now `*SigningMethodRSA` instead of `type struct` - * `KeyFunc` now returns `interface{}` instead of `[]byte` - * `SigningMethod.Sign` now takes `interface{}` instead of `[]byte` for the key - * `SigningMethod.Verify` now takes `interface{}` instead of `[]byte` for the key -* Renamed type `SigningMethodHS256` to `SigningMethodHMAC`. Specific sizes are now just instances of this type. - * Added public package global `SigningMethodHS256` - * Added public package global `SigningMethodHS384` - * Added public package global `SigningMethodHS512` -* Renamed type `SigningMethodRS256` to `SigningMethodRSA`. Specific sizes are now just instances of this type. - * Added public package global `SigningMethodRS256` - * Added public package global `SigningMethodRS384` - * Added public package global `SigningMethodRS512` -* Moved sample private key for HMAC tests from an inline value to a file on disk. Value is unchanged. -* Refactored the RSA implementation to be easier to read -* Exposed helper methods `ParseRSAPrivateKeyFromPEM` and `ParseRSAPublicKeyFromPEM` - -#### 1.0.2 - -* Fixed bug in parsing public keys from certificates -* Added more tests around the parsing of keys for RS256 -* Code refactoring in RS256 implementation. 
No functional changes - -#### 1.0.1 - -* Fixed panic if RS256 signing method was passed an invalid key - -#### 1.0.0 - -* First versioned release -* API stabilized -* Supports creating, signing, parsing, and validating JWT tokens -* Supports RS256 and HS256 signing methods \ No newline at end of file diff --git a/vendor/github.com/dgrijalva/jwt-go/claims.go b/vendor/github.com/dgrijalva/jwt-go/claims.go deleted file mode 100644 index f0228f02e03..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/claims.go +++ /dev/null @@ -1,134 +0,0 @@ -package jwt - -import ( - "crypto/subtle" - "fmt" - "time" -) - -// For a type to be a Claims object, it must just have a Valid method that determines -// if the token is invalid for any supported reason -type Claims interface { - Valid() error -} - -// Structured version of Claims Section, as referenced at -// https://tools.ietf.org/html/rfc7519#section-4.1 -// See examples for how to use this with your own claim types -type StandardClaims struct { - Audience string `json:"aud,omitempty"` - ExpiresAt int64 `json:"exp,omitempty"` - Id string `json:"jti,omitempty"` - IssuedAt int64 `json:"iat,omitempty"` - Issuer string `json:"iss,omitempty"` - NotBefore int64 `json:"nbf,omitempty"` - Subject string `json:"sub,omitempty"` -} - -// Validates time based claims "exp, iat, nbf". -// There is no accounting for clock skew. -// As well, if any of the above claims are not in the token, it will still -// be considered a valid claim. -func (c StandardClaims) Valid() error { - vErr := new(ValidationError) - now := TimeFunc().Unix() - - // The claims below are optional, by default, so if they are set to the - // default value in Go, let's not fail the verification for them. - if c.VerifyExpiresAt(now, false) == false { - delta := time.Unix(now, 0).Sub(time.Unix(c.ExpiresAt, 0)) - vErr.Inner = fmt.Errorf("token is expired by %v", delta) - vErr.Errors |= ValidationErrorExpired - } - - if c.VerifyIssuedAt(now, false) == false { - vErr.Inner = fmt.Errorf("Token used before issued") - vErr.Errors |= ValidationErrorIssuedAt - } - - if c.VerifyNotBefore(now, false) == false { - vErr.Inner = fmt.Errorf("token is not valid yet") - vErr.Errors |= ValidationErrorNotValidYet - } - - if vErr.valid() { - return nil - } - - return vErr -} - -// Compares the aud claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (c *StandardClaims) VerifyAudience(cmp string, req bool) bool { - return verifyAud(c.Audience, cmp, req) -} - -// Compares the exp claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (c *StandardClaims) VerifyExpiresAt(cmp int64, req bool) bool { - return verifyExp(c.ExpiresAt, cmp, req) -} - -// Compares the iat claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (c *StandardClaims) VerifyIssuedAt(cmp int64, req bool) bool { - return verifyIat(c.IssuedAt, cmp, req) -} - -// Compares the iss claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (c *StandardClaims) VerifyIssuer(cmp string, req bool) bool { - return verifyIss(c.Issuer, cmp, req) -} - -// Compares the nbf claim against cmp. 
-// If required is false, this method will return true if the value matches or is unset -func (c *StandardClaims) VerifyNotBefore(cmp int64, req bool) bool { - return verifyNbf(c.NotBefore, cmp, req) -} - -// ----- helpers - -func verifyAud(aud string, cmp string, required bool) bool { - if aud == "" { - return !required - } - if subtle.ConstantTimeCompare([]byte(aud), []byte(cmp)) != 0 { - return true - } else { - return false - } -} - -func verifyExp(exp int64, now int64, required bool) bool { - if exp == 0 { - return !required - } - return now <= exp -} - -func verifyIat(iat int64, now int64, required bool) bool { - if iat == 0 { - return !required - } - return now >= iat -} - -func verifyIss(iss string, cmp string, required bool) bool { - if iss == "" { - return !required - } - if subtle.ConstantTimeCompare([]byte(iss), []byte(cmp)) != 0 { - return true - } else { - return false - } -} - -func verifyNbf(nbf int64, now int64, required bool) bool { - if nbf == 0 { - return !required - } - return now >= nbf -} diff --git a/vendor/github.com/dgrijalva/jwt-go/doc.go b/vendor/github.com/dgrijalva/jwt-go/doc.go deleted file mode 100644 index a86dc1a3b34..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -// Package jwt is a Go implementation of JSON Web Tokens: http://self-issued.info/docs/draft-jones-json-web-token.html -// -// See README.md for more info. -package jwt diff --git a/vendor/github.com/dgrijalva/jwt-go/ecdsa.go b/vendor/github.com/dgrijalva/jwt-go/ecdsa.go deleted file mode 100644 index f977381240e..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/ecdsa.go +++ /dev/null @@ -1,148 +0,0 @@ -package jwt - -import ( - "crypto" - "crypto/ecdsa" - "crypto/rand" - "errors" - "math/big" -) - -var ( - // Sadly this is missing from crypto/ecdsa compared to crypto/rsa - ErrECDSAVerification = errors.New("crypto/ecdsa: verification error") -) - -// Implements the ECDSA family of signing methods signing methods -// Expects *ecdsa.PrivateKey for signing and *ecdsa.PublicKey for verification -type SigningMethodECDSA struct { - Name string - Hash crypto.Hash - KeySize int - CurveBits int -} - -// Specific instances for EC256 and company -var ( - SigningMethodES256 *SigningMethodECDSA - SigningMethodES384 *SigningMethodECDSA - SigningMethodES512 *SigningMethodECDSA -) - -func init() { - // ES256 - SigningMethodES256 = &SigningMethodECDSA{"ES256", crypto.SHA256, 32, 256} - RegisterSigningMethod(SigningMethodES256.Alg(), func() SigningMethod { - return SigningMethodES256 - }) - - // ES384 - SigningMethodES384 = &SigningMethodECDSA{"ES384", crypto.SHA384, 48, 384} - RegisterSigningMethod(SigningMethodES384.Alg(), func() SigningMethod { - return SigningMethodES384 - }) - - // ES512 - SigningMethodES512 = &SigningMethodECDSA{"ES512", crypto.SHA512, 66, 521} - RegisterSigningMethod(SigningMethodES512.Alg(), func() SigningMethod { - return SigningMethodES512 - }) -} - -func (m *SigningMethodECDSA) Alg() string { - return m.Name -} - -// Implements the Verify method from SigningMethod -// For this verify method, key must be an ecdsa.PublicKey struct -func (m *SigningMethodECDSA) Verify(signingString, signature string, key interface{}) error { - var err error - - // Decode the signature - var sig []byte - if sig, err = DecodeSegment(signature); err != nil { - return err - } - - // Get the key - var ecdsaKey *ecdsa.PublicKey - switch k := key.(type) { - case *ecdsa.PublicKey: - ecdsaKey = k - default: - return ErrInvalidKeyType - } - - if len(sig) != 
2*m.KeySize { - return ErrECDSAVerification - } - - r := big.NewInt(0).SetBytes(sig[:m.KeySize]) - s := big.NewInt(0).SetBytes(sig[m.KeySize:]) - - // Create hasher - if !m.Hash.Available() { - return ErrHashUnavailable - } - hasher := m.Hash.New() - hasher.Write([]byte(signingString)) - - // Verify the signature - if verifystatus := ecdsa.Verify(ecdsaKey, hasher.Sum(nil), r, s); verifystatus == true { - return nil - } else { - return ErrECDSAVerification - } -} - -// Implements the Sign method from SigningMethod -// For this signing method, key must be an ecdsa.PrivateKey struct -func (m *SigningMethodECDSA) Sign(signingString string, key interface{}) (string, error) { - // Get the key - var ecdsaKey *ecdsa.PrivateKey - switch k := key.(type) { - case *ecdsa.PrivateKey: - ecdsaKey = k - default: - return "", ErrInvalidKeyType - } - - // Create the hasher - if !m.Hash.Available() { - return "", ErrHashUnavailable - } - - hasher := m.Hash.New() - hasher.Write([]byte(signingString)) - - // Sign the string and return r, s - if r, s, err := ecdsa.Sign(rand.Reader, ecdsaKey, hasher.Sum(nil)); err == nil { - curveBits := ecdsaKey.Curve.Params().BitSize - - if m.CurveBits != curveBits { - return "", ErrInvalidKey - } - - keyBytes := curveBits / 8 - if curveBits%8 > 0 { - keyBytes += 1 - } - - // We serialize the outpus (r and s) into big-endian byte arrays and pad - // them with zeros on the left to make sure the sizes work out. Both arrays - // must be keyBytes long, and the output must be 2*keyBytes long. - rBytes := r.Bytes() - rBytesPadded := make([]byte, keyBytes) - copy(rBytesPadded[keyBytes-len(rBytes):], rBytes) - - sBytes := s.Bytes() - sBytesPadded := make([]byte, keyBytes) - copy(sBytesPadded[keyBytes-len(sBytes):], sBytes) - - out := append(rBytesPadded, sBytesPadded...) 
- - return EncodeSegment(out), nil - } else { - return "", err - } -} diff --git a/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go b/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go deleted file mode 100644 index d19624b7264..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go +++ /dev/null @@ -1,67 +0,0 @@ -package jwt - -import ( - "crypto/ecdsa" - "crypto/x509" - "encoding/pem" - "errors" -) - -var ( - ErrNotECPublicKey = errors.New("Key is not a valid ECDSA public key") - ErrNotECPrivateKey = errors.New("Key is not a valid ECDSA private key") -) - -// Parse PEM encoded Elliptic Curve Private Key Structure -func ParseECPrivateKeyFromPEM(key []byte) (*ecdsa.PrivateKey, error) { - var err error - - // Parse PEM block - var block *pem.Block - if block, _ = pem.Decode(key); block == nil { - return nil, ErrKeyMustBePEMEncoded - } - - // Parse the key - var parsedKey interface{} - if parsedKey, err = x509.ParseECPrivateKey(block.Bytes); err != nil { - return nil, err - } - - var pkey *ecdsa.PrivateKey - var ok bool - if pkey, ok = parsedKey.(*ecdsa.PrivateKey); !ok { - return nil, ErrNotECPrivateKey - } - - return pkey, nil -} - -// Parse PEM encoded PKCS1 or PKCS8 public key -func ParseECPublicKeyFromPEM(key []byte) (*ecdsa.PublicKey, error) { - var err error - - // Parse PEM block - var block *pem.Block - if block, _ = pem.Decode(key); block == nil { - return nil, ErrKeyMustBePEMEncoded - } - - // Parse the key - var parsedKey interface{} - if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { - if cert, err := x509.ParseCertificate(block.Bytes); err == nil { - parsedKey = cert.PublicKey - } else { - return nil, err - } - } - - var pkey *ecdsa.PublicKey - var ok bool - if pkey, ok = parsedKey.(*ecdsa.PublicKey); !ok { - return nil, ErrNotECPublicKey - } - - return pkey, nil -} diff --git a/vendor/github.com/dgrijalva/jwt-go/errors.go b/vendor/github.com/dgrijalva/jwt-go/errors.go deleted file mode 100644 index 1c93024aad2..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/errors.go +++ /dev/null @@ -1,59 +0,0 @@ -package jwt - -import ( - "errors" -) - -// Error constants -var ( - ErrInvalidKey = errors.New("key is invalid") - ErrInvalidKeyType = errors.New("key is of invalid type") - ErrHashUnavailable = errors.New("the requested hash function is unavailable") -) - -// The errors that might occur when parsing and validating a token -const ( - ValidationErrorMalformed uint32 = 1 << iota // Token is malformed - ValidationErrorUnverifiable // Token could not be verified because of signing problems - ValidationErrorSignatureInvalid // Signature validation failed - - // Standard Claim validation errors - ValidationErrorAudience // AUD validation failed - ValidationErrorExpired // EXP validation failed - ValidationErrorIssuedAt // IAT validation failed - ValidationErrorIssuer // ISS validation failed - ValidationErrorNotValidYet // NBF validation failed - ValidationErrorId // JTI validation failed - ValidationErrorClaimsInvalid // Generic claims validation error -) - -// Helper for constructing a ValidationError with a string error message -func NewValidationError(errorText string, errorFlags uint32) *ValidationError { - return &ValidationError{ - text: errorText, - Errors: errorFlags, - } -} - -// The error from Parse if token is not valid -type ValidationError struct { - Inner error // stores the error returned by external dependencies, i.e.: KeyFunc - Errors uint32 // bitfield. see ValidationError... 
constants - text string // errors that do not have a valid error just have text -} - -// Validation error is an error type -func (e ValidationError) Error() string { - if e.Inner != nil { - return e.Inner.Error() - } else if e.text != "" { - return e.text - } else { - return "token is invalid" - } -} - -// No errors -func (e *ValidationError) valid() bool { - return e.Errors == 0 -} diff --git a/vendor/github.com/dgrijalva/jwt-go/hmac.go b/vendor/github.com/dgrijalva/jwt-go/hmac.go deleted file mode 100644 index addbe5d4018..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/hmac.go +++ /dev/null @@ -1,95 +0,0 @@ -package jwt - -import ( - "crypto" - "crypto/hmac" - "errors" -) - -// Implements the HMAC-SHA family of signing methods signing methods -// Expects key type of []byte for both signing and validation -type SigningMethodHMAC struct { - Name string - Hash crypto.Hash -} - -// Specific instances for HS256 and company -var ( - SigningMethodHS256 *SigningMethodHMAC - SigningMethodHS384 *SigningMethodHMAC - SigningMethodHS512 *SigningMethodHMAC - ErrSignatureInvalid = errors.New("signature is invalid") -) - -func init() { - // HS256 - SigningMethodHS256 = &SigningMethodHMAC{"HS256", crypto.SHA256} - RegisterSigningMethod(SigningMethodHS256.Alg(), func() SigningMethod { - return SigningMethodHS256 - }) - - // HS384 - SigningMethodHS384 = &SigningMethodHMAC{"HS384", crypto.SHA384} - RegisterSigningMethod(SigningMethodHS384.Alg(), func() SigningMethod { - return SigningMethodHS384 - }) - - // HS512 - SigningMethodHS512 = &SigningMethodHMAC{"HS512", crypto.SHA512} - RegisterSigningMethod(SigningMethodHS512.Alg(), func() SigningMethod { - return SigningMethodHS512 - }) -} - -func (m *SigningMethodHMAC) Alg() string { - return m.Name -} - -// Verify the signature of HSXXX tokens. Returns nil if the signature is valid. -func (m *SigningMethodHMAC) Verify(signingString, signature string, key interface{}) error { - // Verify the key is the right type - keyBytes, ok := key.([]byte) - if !ok { - return ErrInvalidKeyType - } - - // Decode signature, for comparison - sig, err := DecodeSegment(signature) - if err != nil { - return err - } - - // Can we use the specified hashing method? - if !m.Hash.Available() { - return ErrHashUnavailable - } - - // This signing method is symmetric, so we validate the signature - // by reproducing the signature from the signing string and key, then - // comparing that against the provided signature. - hasher := hmac.New(m.Hash.New, keyBytes) - hasher.Write([]byte(signingString)) - if !hmac.Equal(sig, hasher.Sum(nil)) { - return ErrSignatureInvalid - } - - // No validation errors. Signature is good. - return nil -} - -// Implements the Sign method from SigningMethod for this signing method. 
-// Key must be []byte -func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) (string, error) { - if keyBytes, ok := key.([]byte); ok { - if !m.Hash.Available() { - return "", ErrHashUnavailable - } - - hasher := hmac.New(m.Hash.New, keyBytes) - hasher.Write([]byte(signingString)) - - return EncodeSegment(hasher.Sum(nil)), nil - } - - return "", ErrInvalidKeyType -} diff --git a/vendor/github.com/dgrijalva/jwt-go/map_claims.go b/vendor/github.com/dgrijalva/jwt-go/map_claims.go deleted file mode 100644 index 291213c460d..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/map_claims.go +++ /dev/null @@ -1,94 +0,0 @@ -package jwt - -import ( - "encoding/json" - "errors" - // "fmt" -) - -// Claims type that uses the map[string]interface{} for JSON decoding -// This is the default claims type if you don't supply one -type MapClaims map[string]interface{} - -// Compares the aud claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (m MapClaims) VerifyAudience(cmp string, req bool) bool { - aud, _ := m["aud"].(string) - return verifyAud(aud, cmp, req) -} - -// Compares the exp claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (m MapClaims) VerifyExpiresAt(cmp int64, req bool) bool { - switch exp := m["exp"].(type) { - case float64: - return verifyExp(int64(exp), cmp, req) - case json.Number: - v, _ := exp.Int64() - return verifyExp(v, cmp, req) - } - return req == false -} - -// Compares the iat claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (m MapClaims) VerifyIssuedAt(cmp int64, req bool) bool { - switch iat := m["iat"].(type) { - case float64: - return verifyIat(int64(iat), cmp, req) - case json.Number: - v, _ := iat.Int64() - return verifyIat(v, cmp, req) - } - return req == false -} - -// Compares the iss claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (m MapClaims) VerifyIssuer(cmp string, req bool) bool { - iss, _ := m["iss"].(string) - return verifyIss(iss, cmp, req) -} - -// Compares the nbf claim against cmp. -// If required is false, this method will return true if the value matches or is unset -func (m MapClaims) VerifyNotBefore(cmp int64, req bool) bool { - switch nbf := m["nbf"].(type) { - case float64: - return verifyNbf(int64(nbf), cmp, req) - case json.Number: - v, _ := nbf.Int64() - return verifyNbf(v, cmp, req) - } - return req == false -} - -// Validates time based claims "exp, iat, nbf". -// There is no accounting for clock skew. -// As well, if any of the above claims are not in the token, it will still -// be considered a valid claim. 
-func (m MapClaims) Valid() error { - vErr := new(ValidationError) - now := TimeFunc().Unix() - - if m.VerifyExpiresAt(now, false) == false { - vErr.Inner = errors.New("Token is expired") - vErr.Errors |= ValidationErrorExpired - } - - if m.VerifyIssuedAt(now, false) == false { - vErr.Inner = errors.New("Token used before issued") - vErr.Errors |= ValidationErrorIssuedAt - } - - if m.VerifyNotBefore(now, false) == false { - vErr.Inner = errors.New("Token is not valid yet") - vErr.Errors |= ValidationErrorNotValidYet - } - - if vErr.valid() { - return nil - } - - return vErr -} diff --git a/vendor/github.com/dgrijalva/jwt-go/none.go b/vendor/github.com/dgrijalva/jwt-go/none.go deleted file mode 100644 index f04d189d067..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/none.go +++ /dev/null @@ -1,52 +0,0 @@ -package jwt - -// Implements the none signing method. This is required by the spec -// but you probably should never use it. -var SigningMethodNone *signingMethodNone - -const UnsafeAllowNoneSignatureType unsafeNoneMagicConstant = "none signing method allowed" - -var NoneSignatureTypeDisallowedError error - -type signingMethodNone struct{} -type unsafeNoneMagicConstant string - -func init() { - SigningMethodNone = &signingMethodNone{} - NoneSignatureTypeDisallowedError = NewValidationError("'none' signature type is not allowed", ValidationErrorSignatureInvalid) - - RegisterSigningMethod(SigningMethodNone.Alg(), func() SigningMethod { - return SigningMethodNone - }) -} - -func (m *signingMethodNone) Alg() string { - return "none" -} - -// Only allow 'none' alg type if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Verify(signingString, signature string, key interface{}) (err error) { - // Key must be UnsafeAllowNoneSignatureType to prevent accidentally - // accepting 'none' signing method - if _, ok := key.(unsafeNoneMagicConstant); !ok { - return NoneSignatureTypeDisallowedError - } - // If signing method is none, signature must be an empty string - if signature != "" { - return NewValidationError( - "'none' signing method with non-empty signature", - ValidationErrorSignatureInvalid, - ) - } - - // Accept 'none' signing method. - return nil -} - -// Only allow 'none' signing if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Sign(signingString string, key interface{}) (string, error) { - if _, ok := key.(unsafeNoneMagicConstant); ok { - return "", nil - } - return "", NoneSignatureTypeDisallowedError -} diff --git a/vendor/github.com/dgrijalva/jwt-go/parser.go b/vendor/github.com/dgrijalva/jwt-go/parser.go deleted file mode 100644 index d6901d9adb5..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/parser.go +++ /dev/null @@ -1,148 +0,0 @@ -package jwt - -import ( - "bytes" - "encoding/json" - "fmt" - "strings" -) - -type Parser struct { - ValidMethods []string // If populated, only these methods will be considered valid - UseJSONNumber bool // Use JSON Number format in JSON decoder - SkipClaimsValidation bool // Skip claims validation during token parsing -} - -// Parse, validate, and return a token. -// keyFunc will receive the parsed token and should return the key for validating. 
-// If everything is kosher, err will be nil -func (p *Parser) Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { - return p.ParseWithClaims(tokenString, MapClaims{}, keyFunc) -} - -func (p *Parser) ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc) (*Token, error) { - token, parts, err := p.ParseUnverified(tokenString, claims) - if err != nil { - return token, err - } - - // Verify signing method is in the required set - if p.ValidMethods != nil { - var signingMethodValid = false - var alg = token.Method.Alg() - for _, m := range p.ValidMethods { - if m == alg { - signingMethodValid = true - break - } - } - if !signingMethodValid { - // signing method is not in the listed set - return token, NewValidationError(fmt.Sprintf("signing method %v is invalid", alg), ValidationErrorSignatureInvalid) - } - } - - // Lookup key - var key interface{} - if keyFunc == nil { - // keyFunc was not provided. short circuiting validation - return token, NewValidationError("no Keyfunc was provided.", ValidationErrorUnverifiable) - } - if key, err = keyFunc(token); err != nil { - // keyFunc returned an error - if ve, ok := err.(*ValidationError); ok { - return token, ve - } - return token, &ValidationError{Inner: err, Errors: ValidationErrorUnverifiable} - } - - vErr := &ValidationError{} - - // Validate Claims - if !p.SkipClaimsValidation { - if err := token.Claims.Valid(); err != nil { - - // If the Claims Valid returned an error, check if it is a validation error, - // If it was another error type, create a ValidationError with a generic ClaimsInvalid flag set - if e, ok := err.(*ValidationError); !ok { - vErr = &ValidationError{Inner: err, Errors: ValidationErrorClaimsInvalid} - } else { - vErr = e - } - } - } - - // Perform validation - token.Signature = parts[2] - if err = token.Method.Verify(strings.Join(parts[0:2], "."), token.Signature, key); err != nil { - vErr.Inner = err - vErr.Errors |= ValidationErrorSignatureInvalid - } - - if vErr.valid() { - token.Valid = true - return token, nil - } - - return token, vErr -} - -// WARNING: Don't use this method unless you know what you're doing -// -// This method parses the token but doesn't validate the signature. It's only -// ever useful in cases where you know the signature is valid (because it has -// been checked previously in the stack) and you want to extract values from -// it. 
-func (p *Parser) ParseUnverified(tokenString string, claims Claims) (token *Token, parts []string, err error) { - parts = strings.Split(tokenString, ".") - if len(parts) != 3 { - return nil, parts, NewValidationError("token contains an invalid number of segments", ValidationErrorMalformed) - } - - token = &Token{Raw: tokenString} - - // parse Header - var headerBytes []byte - if headerBytes, err = DecodeSegment(parts[0]); err != nil { - if strings.HasPrefix(strings.ToLower(tokenString), "bearer ") { - return token, parts, NewValidationError("tokenstring should not contain 'bearer '", ValidationErrorMalformed) - } - return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} - } - if err = json.Unmarshal(headerBytes, &token.Header); err != nil { - return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} - } - - // parse Claims - var claimBytes []byte - token.Claims = claims - - if claimBytes, err = DecodeSegment(parts[1]); err != nil { - return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} - } - dec := json.NewDecoder(bytes.NewBuffer(claimBytes)) - if p.UseJSONNumber { - dec.UseNumber() - } - // JSON Decode. Special case for map type to avoid weird pointer behavior - if c, ok := token.Claims.(MapClaims); ok { - err = dec.Decode(&c) - } else { - err = dec.Decode(&claims) - } - // Handle decode error - if err != nil { - return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} - } - - // Lookup signature method - if method, ok := token.Header["alg"].(string); ok { - if token.Method = GetSigningMethod(method); token.Method == nil { - return token, parts, NewValidationError("signing method (alg) is unavailable.", ValidationErrorUnverifiable) - } - } else { - return token, parts, NewValidationError("signing method (alg) is unspecified.", ValidationErrorUnverifiable) - } - - return token, parts, nil -} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa.go b/vendor/github.com/dgrijalva/jwt-go/rsa.go deleted file mode 100644 index e4caf1ca4a1..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/rsa.go +++ /dev/null @@ -1,101 +0,0 @@ -package jwt - -import ( - "crypto" - "crypto/rand" - "crypto/rsa" -) - -// Implements the RSA family of signing methods signing methods -// Expects *rsa.PrivateKey for signing and *rsa.PublicKey for validation -type SigningMethodRSA struct { - Name string - Hash crypto.Hash -} - -// Specific instances for RS256 and company -var ( - SigningMethodRS256 *SigningMethodRSA - SigningMethodRS384 *SigningMethodRSA - SigningMethodRS512 *SigningMethodRSA -) - -func init() { - // RS256 - SigningMethodRS256 = &SigningMethodRSA{"RS256", crypto.SHA256} - RegisterSigningMethod(SigningMethodRS256.Alg(), func() SigningMethod { - return SigningMethodRS256 - }) - - // RS384 - SigningMethodRS384 = &SigningMethodRSA{"RS384", crypto.SHA384} - RegisterSigningMethod(SigningMethodRS384.Alg(), func() SigningMethod { - return SigningMethodRS384 - }) - - // RS512 - SigningMethodRS512 = &SigningMethodRSA{"RS512", crypto.SHA512} - RegisterSigningMethod(SigningMethodRS512.Alg(), func() SigningMethod { - return SigningMethodRS512 - }) -} - -func (m *SigningMethodRSA) Alg() string { - return m.Name -} - -// Implements the Verify method from SigningMethod -// For this signing method, must be an *rsa.PublicKey structure. 
-func (m *SigningMethodRSA) Verify(signingString, signature string, key interface{}) error { - var err error - - // Decode the signature - var sig []byte - if sig, err = DecodeSegment(signature); err != nil { - return err - } - - var rsaKey *rsa.PublicKey - var ok bool - - if rsaKey, ok = key.(*rsa.PublicKey); !ok { - return ErrInvalidKeyType - } - - // Create hasher - if !m.Hash.Available() { - return ErrHashUnavailable - } - hasher := m.Hash.New() - hasher.Write([]byte(signingString)) - - // Verify the signature - return rsa.VerifyPKCS1v15(rsaKey, m.Hash, hasher.Sum(nil), sig) -} - -// Implements the Sign method from SigningMethod -// For this signing method, must be an *rsa.PrivateKey structure. -func (m *SigningMethodRSA) Sign(signingString string, key interface{}) (string, error) { - var rsaKey *rsa.PrivateKey - var ok bool - - // Validate type of key - if rsaKey, ok = key.(*rsa.PrivateKey); !ok { - return "", ErrInvalidKey - } - - // Create the hasher - if !m.Hash.Available() { - return "", ErrHashUnavailable - } - - hasher := m.Hash.New() - hasher.Write([]byte(signingString)) - - // Sign the string and return the encoded bytes - if sigBytes, err := rsa.SignPKCS1v15(rand.Reader, rsaKey, m.Hash, hasher.Sum(nil)); err == nil { - return EncodeSegment(sigBytes), nil - } else { - return "", err - } -} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go b/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go deleted file mode 100644 index 10ee9db8a4e..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go +++ /dev/null @@ -1,126 +0,0 @@ -// +build go1.4 - -package jwt - -import ( - "crypto" - "crypto/rand" - "crypto/rsa" -) - -// Implements the RSAPSS family of signing methods signing methods -type SigningMethodRSAPSS struct { - *SigningMethodRSA - Options *rsa.PSSOptions -} - -// Specific instances for RS/PS and company -var ( - SigningMethodPS256 *SigningMethodRSAPSS - SigningMethodPS384 *SigningMethodRSAPSS - SigningMethodPS512 *SigningMethodRSAPSS -) - -func init() { - // PS256 - SigningMethodPS256 = &SigningMethodRSAPSS{ - &SigningMethodRSA{ - Name: "PS256", - Hash: crypto.SHA256, - }, - &rsa.PSSOptions{ - SaltLength: rsa.PSSSaltLengthAuto, - Hash: crypto.SHA256, - }, - } - RegisterSigningMethod(SigningMethodPS256.Alg(), func() SigningMethod { - return SigningMethodPS256 - }) - - // PS384 - SigningMethodPS384 = &SigningMethodRSAPSS{ - &SigningMethodRSA{ - Name: "PS384", - Hash: crypto.SHA384, - }, - &rsa.PSSOptions{ - SaltLength: rsa.PSSSaltLengthAuto, - Hash: crypto.SHA384, - }, - } - RegisterSigningMethod(SigningMethodPS384.Alg(), func() SigningMethod { - return SigningMethodPS384 - }) - - // PS512 - SigningMethodPS512 = &SigningMethodRSAPSS{ - &SigningMethodRSA{ - Name: "PS512", - Hash: crypto.SHA512, - }, - &rsa.PSSOptions{ - SaltLength: rsa.PSSSaltLengthAuto, - Hash: crypto.SHA512, - }, - } - RegisterSigningMethod(SigningMethodPS512.Alg(), func() SigningMethod { - return SigningMethodPS512 - }) -} - -// Implements the Verify method from SigningMethod -// For this verify method, key must be an rsa.PublicKey struct -func (m *SigningMethodRSAPSS) Verify(signingString, signature string, key interface{}) error { - var err error - - // Decode the signature - var sig []byte - if sig, err = DecodeSegment(signature); err != nil { - return err - } - - var rsaKey *rsa.PublicKey - switch k := key.(type) { - case *rsa.PublicKey: - rsaKey = k - default: - return ErrInvalidKey - } - - // Create hasher - if !m.Hash.Available() { - return ErrHashUnavailable - } - hasher := m.Hash.New() - 
hasher.Write([]byte(signingString)) - - return rsa.VerifyPSS(rsaKey, m.Hash, hasher.Sum(nil), sig, m.Options) -} - -// Implements the Sign method from SigningMethod -// For this signing method, key must be an rsa.PrivateKey struct -func (m *SigningMethodRSAPSS) Sign(signingString string, key interface{}) (string, error) { - var rsaKey *rsa.PrivateKey - - switch k := key.(type) { - case *rsa.PrivateKey: - rsaKey = k - default: - return "", ErrInvalidKeyType - } - - // Create the hasher - if !m.Hash.Available() { - return "", ErrHashUnavailable - } - - hasher := m.Hash.New() - hasher.Write([]byte(signingString)) - - // Sign the string and return the encoded bytes - if sigBytes, err := rsa.SignPSS(rand.Reader, rsaKey, m.Hash, hasher.Sum(nil), m.Options); err == nil { - return EncodeSegment(sigBytes), nil - } else { - return "", err - } -} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go b/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go deleted file mode 100644 index a5ababf956c..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go +++ /dev/null @@ -1,101 +0,0 @@ -package jwt - -import ( - "crypto/rsa" - "crypto/x509" - "encoding/pem" - "errors" -) - -var ( - ErrKeyMustBePEMEncoded = errors.New("Invalid Key: Key must be PEM encoded PKCS1 or PKCS8 private key") - ErrNotRSAPrivateKey = errors.New("Key is not a valid RSA private key") - ErrNotRSAPublicKey = errors.New("Key is not a valid RSA public key") -) - -// Parse PEM encoded PKCS1 or PKCS8 private key -func ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error) { - var err error - - // Parse PEM block - var block *pem.Block - if block, _ = pem.Decode(key); block == nil { - return nil, ErrKeyMustBePEMEncoded - } - - var parsedKey interface{} - if parsedKey, err = x509.ParsePKCS1PrivateKey(block.Bytes); err != nil { - if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { - return nil, err - } - } - - var pkey *rsa.PrivateKey - var ok bool - if pkey, ok = parsedKey.(*rsa.PrivateKey); !ok { - return nil, ErrNotRSAPrivateKey - } - - return pkey, nil -} - -// Parse PEM encoded PKCS1 or PKCS8 private key protected with password -func ParseRSAPrivateKeyFromPEMWithPassword(key []byte, password string) (*rsa.PrivateKey, error) { - var err error - - // Parse PEM block - var block *pem.Block - if block, _ = pem.Decode(key); block == nil { - return nil, ErrKeyMustBePEMEncoded - } - - var parsedKey interface{} - - var blockDecrypted []byte - if blockDecrypted, err = x509.DecryptPEMBlock(block, []byte(password)); err != nil { - return nil, err - } - - if parsedKey, err = x509.ParsePKCS1PrivateKey(blockDecrypted); err != nil { - if parsedKey, err = x509.ParsePKCS8PrivateKey(blockDecrypted); err != nil { - return nil, err - } - } - - var pkey *rsa.PrivateKey - var ok bool - if pkey, ok = parsedKey.(*rsa.PrivateKey); !ok { - return nil, ErrNotRSAPrivateKey - } - - return pkey, nil -} - -// Parse PEM encoded PKCS1 or PKCS8 public key -func ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error) { - var err error - - // Parse PEM block - var block *pem.Block - if block, _ = pem.Decode(key); block == nil { - return nil, ErrKeyMustBePEMEncoded - } - - // Parse the key - var parsedKey interface{} - if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { - if cert, err := x509.ParseCertificate(block.Bytes); err == nil { - parsedKey = cert.PublicKey - } else { - return nil, err - } - } - - var pkey *rsa.PublicKey - var ok bool - if pkey, ok = parsedKey.(*rsa.PublicKey); !ok { - return nil, 
ErrNotRSAPublicKey - } - - return pkey, nil -} diff --git a/vendor/github.com/dgrijalva/jwt-go/signing_method.go b/vendor/github.com/dgrijalva/jwt-go/signing_method.go deleted file mode 100644 index ed1f212b21e..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/signing_method.go +++ /dev/null @@ -1,35 +0,0 @@ -package jwt - -import ( - "sync" -) - -var signingMethods = map[string]func() SigningMethod{} -var signingMethodLock = new(sync.RWMutex) - -// Implement SigningMethod to add new methods for signing or verifying tokens. -type SigningMethod interface { - Verify(signingString, signature string, key interface{}) error // Returns nil if signature is valid - Sign(signingString string, key interface{}) (string, error) // Returns encoded signature or error - Alg() string // returns the alg identifier for this method (example: 'HS256') -} - -// Register the "alg" name and a factory function for signing method. -// This is typically done during init() in the method's implementation -func RegisterSigningMethod(alg string, f func() SigningMethod) { - signingMethodLock.Lock() - defer signingMethodLock.Unlock() - - signingMethods[alg] = f -} - -// Get a signing method from an "alg" string -func GetSigningMethod(alg string) (method SigningMethod) { - signingMethodLock.RLock() - defer signingMethodLock.RUnlock() - - if methodF, ok := signingMethods[alg]; ok { - method = methodF() - } - return -} diff --git a/vendor/github.com/dgrijalva/jwt-go/token.go b/vendor/github.com/dgrijalva/jwt-go/token.go deleted file mode 100644 index d637e0867c6..00000000000 --- a/vendor/github.com/dgrijalva/jwt-go/token.go +++ /dev/null @@ -1,108 +0,0 @@ -package jwt - -import ( - "encoding/base64" - "encoding/json" - "strings" - "time" -) - -// TimeFunc provides the current time when parsing token to validate "exp" claim (expiration time). -// You can override it to use another time value. This is useful for testing or if your -// server uses a different time zone than your tokens. -var TimeFunc = time.Now - -// Parse methods use this callback function to supply -// the key for verification. The function receives the parsed, -// but unverified Token. This allows you to use properties in the -// Header of the token (such as `kid`) to identify which key to use. -type Keyfunc func(*Token) (interface{}, error) - -// A JWT Token. Different fields will be used depending on whether you're -// creating or parsing/verifying a token. -type Token struct { - Raw string // The raw token. Populated when you Parse a token - Method SigningMethod // The signing method used or to be used - Header map[string]interface{} // The first segment of the token - Claims Claims // The second segment of the token - Signature string // The third segment of the token. Populated when you Parse a token - Valid bool // Is the token valid? Populated when you Parse/Verify a token -} - -// Create a new Token. 
Takes a signing method -func New(method SigningMethod) *Token { - return NewWithClaims(method, MapClaims{}) -} - -func NewWithClaims(method SigningMethod, claims Claims) *Token { - return &Token{ - Header: map[string]interface{}{ - "typ": "JWT", - "alg": method.Alg(), - }, - Claims: claims, - Method: method, - } -} - -// Get the complete, signed token -func (t *Token) SignedString(key interface{}) (string, error) { - var sig, sstr string - var err error - if sstr, err = t.SigningString(); err != nil { - return "", err - } - if sig, err = t.Method.Sign(sstr, key); err != nil { - return "", err - } - return strings.Join([]string{sstr, sig}, "."), nil -} - -// Generate the signing string. This is the -// most expensive part of the whole deal. Unless you -// need this for something special, just go straight for -// the SignedString. -func (t *Token) SigningString() (string, error) { - var err error - parts := make([]string, 2) - for i, _ := range parts { - var jsonValue []byte - if i == 0 { - if jsonValue, err = json.Marshal(t.Header); err != nil { - return "", err - } - } else { - if jsonValue, err = json.Marshal(t.Claims); err != nil { - return "", err - } - } - - parts[i] = EncodeSegment(jsonValue) - } - return strings.Join(parts, "."), nil -} - -// Parse, validate, and return a token. -// keyFunc will receive the parsed token and should return the key for validating. -// If everything is kosher, err will be nil -func Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { - return new(Parser).Parse(tokenString, keyFunc) -} - -func ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc) (*Token, error) { - return new(Parser).ParseWithClaims(tokenString, claims, keyFunc) -} - -// Encode JWT specific base64url encoding with padding stripped -func EncodeSegment(seg []byte) string { - return strings.TrimRight(base64.URLEncoding.EncodeToString(seg), "=") -} - -// Decode JWT specific base64url encoding with padding stripped -func DecodeSegment(seg string) ([]byte, error) { - if l := len(seg) % 4; l > 0 { - seg += strings.Repeat("=", 4-l) - } - - return base64.URLEncoding.DecodeString(seg) -} diff --git a/vendor/github.com/dgryski/go-rendezvous/LICENSE b/vendor/github.com/dgryski/go-rendezvous/LICENSE deleted file mode 100644 index 22080f736a4..00000000000 --- a/vendor/github.com/dgryski/go-rendezvous/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2017-2020 Damian Gryski - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/vendor/github.com/dgryski/go-rendezvous/rdv.go b/vendor/github.com/dgryski/go-rendezvous/rdv.go deleted file mode 100644 index 7a6f8203c67..00000000000 --- a/vendor/github.com/dgryski/go-rendezvous/rdv.go +++ /dev/null @@ -1,79 +0,0 @@ -package rendezvous - -type Rendezvous struct { - nodes map[string]int - nstr []string - nhash []uint64 - hash Hasher -} - -type Hasher func(s string) uint64 - -func New(nodes []string, hash Hasher) *Rendezvous { - r := &Rendezvous{ - nodes: make(map[string]int, len(nodes)), - nstr: make([]string, len(nodes)), - nhash: make([]uint64, len(nodes)), - hash: hash, - } - - for i, n := range nodes { - r.nodes[n] = i - r.nstr[i] = n - r.nhash[i] = hash(n) - } - - return r -} - -func (r *Rendezvous) Lookup(k string) string { - // short-circuit if we're empty - if len(r.nodes) == 0 { - return "" - } - - khash := r.hash(k) - - var midx int - var mhash = xorshiftMult64(khash ^ r.nhash[0]) - - for i, nhash := range r.nhash[1:] { - if h := xorshiftMult64(khash ^ nhash); h > mhash { - midx = i + 1 - mhash = h - } - } - - return r.nstr[midx] -} - -func (r *Rendezvous) Add(node string) { - r.nodes[node] = len(r.nstr) - r.nstr = append(r.nstr, node) - r.nhash = append(r.nhash, r.hash(node)) -} - -func (r *Rendezvous) Remove(node string) { - // find index of node to remove - nidx := r.nodes[node] - - // remove from the slices - l := len(r.nstr) - r.nstr[nidx] = r.nstr[l] - r.nstr = r.nstr[:l] - - r.nhash[nidx] = r.nhash[l] - r.nhash = r.nhash[:l] - - // update the map - delete(r.nodes, node) - moved := r.nstr[nidx] - r.nodes[moved] = nidx -} - -func xorshiftMult64(x uint64) uint64 { - x ^= x >> 12 // a - x ^= x << 25 // b - x ^= x >> 27 // c - return x * 2685821657736338717 -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/.gitignore b/vendor/github.com/eclipse/paho.mqtt.golang/.gitignore deleted file mode 100644 index 47bb0de48e9..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/.gitignore +++ /dev/null @@ -1,36 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe - -*.msg -*.lok - -samples/trivial -samples/trivial2 -samples/sample -samples/reconnect -samples/ssl -samples/custom_store -samples/simple -samples/stdinpub -samples/stdoutsub -samples/routing \ No newline at end of file diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/CONTRIBUTING.md b/vendor/github.com/eclipse/paho.mqtt.golang/CONTRIBUTING.md deleted file mode 100644 index 9791dc60318..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/CONTRIBUTING.md +++ /dev/null @@ -1,56 +0,0 @@ -Contributing to Paho -==================== - -Thanks for your interest in this project. - -Project description: --------------------- - -The Paho project has been created to provide scalable open-source implementations of open and standard messaging protocols aimed at new, existing, and emerging applications for Machine-to-Machine (M2M) and Internet of Things (IoT). -Paho reflects the inherent physical and cost constraints of device connectivity. Its objectives include effective levels of decoupling between devices and applications, designed to keep markets open and encourage the rapid growth of scalable Web and Enterprise middleware and applications. 
Paho is being kicked off with MQTT publish/subscribe client implementations for use on embedded platforms, along with corresponding server support as determined by the community. - -- https://projects.eclipse.org/projects/technology.paho - -Developer resources: --------------------- - -Information regarding source code management, builds, coding standards, and more. - -- https://projects.eclipse.org/projects/technology.paho/developer - -Contributor License Agreement: ------------------------------- - -Before your contribution can be accepted by the project, you need to create and electronically sign the Eclipse Foundation Contributor License Agreement (CLA). - -- http://www.eclipse.org/legal/CLA.php - -Contributing Code: ------------------- - -The Go client is developed in Github, see their documentation on the process of forking and pull requests; https://help.github.com/categories/collaborating-on-projects-using-pull-requests/ - -Git commit messages should follow the style described here; - -http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html - -Contact: --------- - -Contact the project developers via the project's "dev" list. - -- https://dev.eclipse.org/mailman/listinfo/paho-dev - -Search for bugs: ----------------- - -This project uses Github issues to track ongoing development and issues. - -- https://github.com/eclipse/paho.mqtt.golang/issues - -Create a new bug: ------------------ - -Be sure to search for existing bugs before you create another one. Remember that contributions are always welcome! - -- https://github.com/eclipse/paho.mqtt.golang/issues diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/DISTRIBUTION b/vendor/github.com/eclipse/paho.mqtt.golang/DISTRIBUTION deleted file mode 100644 index 34e49731daa..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/DISTRIBUTION +++ /dev/null @@ -1,15 +0,0 @@ - - -Eclipse Distribution License - v 1.0 - -Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE b/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE deleted file mode 100644 index aa7cc810fa1..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE +++ /dev/null @@ -1,87 +0,0 @@ -Eclipse Public License - v 1.0 - -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. - -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. 
For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. 
If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. 
The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/README.md b/vendor/github.com/eclipse/paho.mqtt.golang/README.md deleted file mode 100644 index 81c7148e093..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/README.md +++ /dev/null @@ -1,67 +0,0 @@ - -[![GoDoc](https://godoc.org/github.com/eclipse/paho.mqtt.golang?status.svg)](https://godoc.org/github.com/eclipse/paho.mqtt.golang) -[![Go Report Card](https://goreportcard.com/badge/github.com/eclipse/paho.mqtt.golang)](https://goreportcard.com/report/github.com/eclipse/paho.mqtt.golang) - -Eclipse Paho MQTT Go client -=========================== - - -This repository contains the source code for the [Eclipse Paho](http://eclipse.org/paho) MQTT Go client library. - -This code builds a library which enable applications to connect to an [MQTT](http://mqtt.org) broker to publish messages, and to subscribe to topics and receive published messages. - -This library supports a fully asynchronous mode of operation. - - -Installation and Build ----------------------- - -This client is designed to work with the standard Go tools, so installation is as easy as: - -``` -go get github.com/eclipse/paho.mqtt.golang -``` - -The client depends on Google's [websockets](https://godoc.org/golang.org/x/net/websocket) and [proxy](https://godoc.org/golang.org/x/net/proxy) package, -also easily installed with the commands: - -``` -go get golang.org/x/net/websocket -go get golang.org/x/net/proxy -``` - - -Usage and API -------------- - -Detailed API documentation is available by using to godoc tool, or can be browsed online -using the [godoc.org](http://godoc.org/github.com/eclipse/paho.mqtt.golang) service. - -Make use of the library by importing it in your Go client source code. For example, -``` -import "github.com/eclipse/paho.mqtt.golang" -``` - -Samples are available in the `cmd` directory for reference. - - -Runtime tracing ---------------- - -Tracing is enabled by assigning logs (from the Go log package) to the logging endpoints, ERROR, CRITICAL, WARN and DEBUG - - -Reporting bugs --------------- - -Please report bugs by raising issues for this project in github https://github.com/eclipse/paho.mqtt.golang/issues - - -More information ----------------- - -Discussion of the Paho clients takes place on the [Eclipse paho-dev mailing list](https://dev.eclipse.org/mailman/listinfo/paho-dev). 
- -General questions about the MQTT protocol are discussed in the [MQTT Google Group](https://groups.google.com/forum/?hl=en-US&fromgroups#!forum/mqtt). - -There is much more information available via the [MQTT community site](http://mqtt.org). diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/about.html b/vendor/github.com/eclipse/paho.mqtt.golang/about.html deleted file mode 100644 index b183f417abb..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/about.html +++ /dev/null @@ -1,41 +0,0 @@ - - - -About - - -

-About This Content
-
-December 9, 2013
-
-License
-
-The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise
-indicated below, the Content is provided to you under the terms and conditions of the
-Eclipse Public License Version 1.0 ("EPL") and Eclipse Distribution License Version 1.0 ("EDL").
-A copy of the EPL is available at
-http://www.eclipse.org/legal/epl-v10.html
-and a copy of the EDL is available at
-http://www.eclipse.org/org/documents/edl-v10.php.
-For purposes of the EPL, "Program" will mean the Content.
-
-If you did not receive this Content directly from the Eclipse Foundation, the Content is
-being redistributed by another party ("Redistributor") and different terms and conditions may
-apply to your use of any object code in the Content. Check the Redistributor's license that was
-provided with the Content. If no such license exists, contact the Redistributor. Unless otherwise
-indicated below, the terms and conditions of the EPL still apply to any source code in the Content
-and such source code may be obtained at http://www.eclipse.org.
-
-Third Party Content
-
-The Content includes items that have been sourced from third parties as set out below. If you
-did not receive this Content directly from the Eclipse Foundation, the following is provided
-for informational purposes only, and you should look to the Redistributor's license for
-terms and conditions of use.
-
-None
-
- - - - diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/client.go b/vendor/github.com/eclipse/paho.mqtt.golang/client.go deleted file mode 100644 index 24d56c1f38b..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/client.go +++ /dev/null @@ -1,759 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -// Portions copyright © 2018 TIBCO Software Inc. - -// Package mqtt provides an MQTT v3.1.1 client library. -package mqtt - -import ( - "errors" - "fmt" - "net" - "strings" - "sync" - "sync/atomic" - "time" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -const ( - disconnected uint32 = iota - connecting - reconnecting - connected -) - -// Client is the interface definition for a Client as used by this -// library, the interface is primarily to allow mocking tests. -// -// It is an MQTT v3.1.1 client for communicating -// with an MQTT server using non-blocking methods that allow work -// to be done in the background. -// An application may connect to an MQTT server using: -// A plain TCP socket -// A secure SSL/TLS socket -// A websocket -// To enable ensured message delivery at Quality of Service (QoS) levels -// described in the MQTT spec, a message persistence mechanism must be -// used. This is done by providing a type which implements the Store -// interface. For convenience, FileStore and MemoryStore are provided -// implementations that should be sufficient for most use cases. More -// information can be found in their respective documentation. -// Numerous connection options may be specified by configuring a -// and then supplying a ClientOptions type. -type Client interface { - // IsConnected returns a bool signifying whether - // the client is connected or not. - IsConnected() bool - // IsConnectionOpen return a bool signifying wether the client has an active - // connection to mqtt broker, i.e not in disconnected or reconnect mode - IsConnectionOpen() bool - // Connect will create a connection to the message broker, by default - // it will attempt to connect at v3.1.1 and auto retry at v3.1 if that - // fails - Connect() Token - // Disconnect will end the connection with the server, but not before waiting - // the specified number of milliseconds to wait for existing work to be - // completed. - Disconnect(quiesce uint) - // Publish will publish a message with the specified QoS and content - // to the specified topic. - // Returns a token to track delivery of the message to the broker - Publish(topic string, qos byte, retained bool, payload interface{}) Token - // Subscribe starts a new subscription. Provide a MessageHandler to be executed when - // a message is published on the topic provided, or nil for the default handler - Subscribe(topic string, qos byte, callback MessageHandler) Token - // SubscribeMultiple starts a new subscription for multiple topics. Provide a MessageHandler to - // be executed when a message is published on one of the topics provided, or nil for the - // default handler - SubscribeMultiple(filters map[string]byte, callback MessageHandler) Token - // Unsubscribe will end the subscription from each of the topics provided. 
- // Messages published to those topics from other clients will no longer be - // received. - Unsubscribe(topics ...string) Token - // AddRoute allows you to add a handler for messages on a specific topic - // without making a subscription. For example having a different handler - // for parts of a wildcard subscription - AddRoute(topic string, callback MessageHandler) - // OptionsReader returns a ClientOptionsReader which is a copy of the clientoptions - // in use by the client. - OptionsReader() ClientOptionsReader -} - -// client implements the Client interface -type client struct { - lastSent atomic.Value - lastReceived atomic.Value - pingOutstanding int32 - status uint32 - sync.RWMutex - messageIds - conn net.Conn - ibound chan packets.ControlPacket - obound chan *PacketAndToken - oboundP chan *PacketAndToken - msgRouter *router - stopRouter chan bool - incomingPubChan chan *packets.PublishPacket - errors chan error - stop chan struct{} - persist Store - options ClientOptions - workers sync.WaitGroup -} - -// NewClient will create an MQTT v3.1.1 client with all of the options specified -// in the provided ClientOptions. The client must have the Connect method called -// on it before it may be used. This is to make sure resources (such as a net -// connection) are created before the application is actually ready. -func NewClient(o *ClientOptions) Client { - c := &client{} - c.options = *o - - if c.options.Store == nil { - c.options.Store = NewMemoryStore() - } - switch c.options.ProtocolVersion { - case 3, 4: - c.options.protocolVersionExplicit = true - case 0x83, 0x84: - c.options.protocolVersionExplicit = true - default: - c.options.ProtocolVersion = 4 - c.options.protocolVersionExplicit = false - } - c.persist = c.options.Store - c.status = disconnected - c.messageIds = messageIds{index: make(map[uint16]tokenCompletor)} - c.msgRouter, c.stopRouter = newRouter() - c.msgRouter.setDefaultHandler(c.options.DefaultPublishHandler) - if !c.options.AutoReconnect { - c.options.MessageChannelDepth = 0 - } - return c -} - -// AddRoute allows you to add a handler for messages on a specific topic -// without making a subscription. For example having a different handler -// for parts of a wildcard subscription -func (c *client) AddRoute(topic string, callback MessageHandler) { - if callback != nil { - c.msgRouter.addRoute(topic, callback) - } -} - -// IsConnected returns a bool signifying whether -// the client is connected or not. 
-func (c *client) IsConnected() bool { - c.RLock() - defer c.RUnlock() - status := atomic.LoadUint32(&c.status) - switch { - case status == connected: - return true - case c.options.AutoReconnect && status > connecting: - return true - default: - return false - } -} - -// IsConnectionOpen return a bool signifying whether the client has an active -// connection to mqtt broker, i.e not in disconnected or reconnect mode -func (c *client) IsConnectionOpen() bool { - c.RLock() - defer c.RUnlock() - status := atomic.LoadUint32(&c.status) - switch { - case status == connected: - return true - default: - return false - } -} - -func (c *client) connectionStatus() uint32 { - c.RLock() - defer c.RUnlock() - status := atomic.LoadUint32(&c.status) - return status -} - -func (c *client) setConnected(status uint32) { - c.Lock() - defer c.Unlock() - atomic.StoreUint32(&c.status, uint32(status)) -} - -//ErrNotConnected is the error returned from function calls that are -//made when the client is not connected to a broker -var ErrNotConnected = errors.New("Not Connected") - -// Connect will create a connection to the message broker, by default -// it will attempt to connect at v3.1.1 and auto retry at v3.1 if that -// fails -func (c *client) Connect() Token { - var err error - t := newToken(packets.Connect).(*ConnectToken) - DEBUG.Println(CLI, "Connect()") - - c.obound = make(chan *PacketAndToken, c.options.MessageChannelDepth) - c.oboundP = make(chan *PacketAndToken, c.options.MessageChannelDepth) - c.ibound = make(chan packets.ControlPacket) - - go func() { - c.persist.Open() - - c.setConnected(connecting) - c.errors = make(chan error, 1) - c.stop = make(chan struct{}) - - var rc byte - protocolVersion := c.options.ProtocolVersion - - if len(c.options.Servers) == 0 { - t.setError(fmt.Errorf("No servers defined to connect to")) - return - } - - for _, broker := range c.options.Servers { - cm := newConnectMsgFromOptions(&c.options, broker) - c.options.ProtocolVersion = protocolVersion - CONN: - DEBUG.Println(CLI, "about to write new connect msg") - c.conn, err = openConnection(broker, c.options.TLSConfig, c.options.ConnectTimeout, c.options.HTTPHeaders) - if err == nil { - DEBUG.Println(CLI, "socket connected to broker") - switch c.options.ProtocolVersion { - case 3: - DEBUG.Println(CLI, "Using MQTT 3.1 protocol") - cm.ProtocolName = "MQIsdp" - cm.ProtocolVersion = 3 - case 0x83: - DEBUG.Println(CLI, "Using MQTT 3.1b protocol") - cm.ProtocolName = "MQIsdp" - cm.ProtocolVersion = 0x83 - case 0x84: - DEBUG.Println(CLI, "Using MQTT 3.1.1b protocol") - cm.ProtocolName = "MQTT" - cm.ProtocolVersion = 0x84 - default: - DEBUG.Println(CLI, "Using MQTT 3.1.1 protocol") - c.options.ProtocolVersion = 4 - cm.ProtocolName = "MQTT" - cm.ProtocolVersion = 4 - } - cm.Write(c.conn) - - rc, t.sessionPresent = c.connect() - if rc != packets.Accepted { - if c.conn != nil { - c.conn.Close() - c.conn = nil - } - //if the protocol version was explicitly set don't do any fallback - if c.options.protocolVersionExplicit { - ERROR.Println(CLI, "Connecting to", broker, "CONNACK was not CONN_ACCEPTED, but rather", packets.ConnackReturnCodes[rc]) - continue - } - if c.options.ProtocolVersion == 4 { - DEBUG.Println(CLI, "Trying reconnect using MQTT 3.1 protocol") - c.options.ProtocolVersion = 3 - goto CONN - } - } - break - } else { - ERROR.Println(CLI, err.Error()) - WARN.Println(CLI, "failed to connect to broker, trying next") - rc = packets.ErrNetworkError - } - } - - if c.conn == nil { - ERROR.Println(CLI, "Failed to connect to a 
broker") - c.setConnected(disconnected) - c.persist.Close() - t.returnCode = rc - if rc != packets.ErrNetworkError { - t.setError(packets.ConnErrors[rc]) - } else { - t.setError(fmt.Errorf("%s : %s", packets.ConnErrors[rc], err)) - } - return - } - - c.options.protocolVersionExplicit = true - - if c.options.KeepAlive != 0 { - atomic.StoreInt32(&c.pingOutstanding, 0) - c.lastReceived.Store(time.Now()) - c.lastSent.Store(time.Now()) - c.workers.Add(1) - go keepalive(c) - } - - c.incomingPubChan = make(chan *packets.PublishPacket, c.options.MessageChannelDepth) - c.msgRouter.matchAndDispatch(c.incomingPubChan, c.options.Order, c) - - c.setConnected(connected) - DEBUG.Println(CLI, "client is connected") - if c.options.OnConnect != nil { - go c.options.OnConnect(c) - } - - c.workers.Add(4) - go errorWatch(c) - go alllogic(c) - go outgoing(c) - go incoming(c) - - // Take care of any messages in the store - if c.options.CleanSession == false { - c.resume(c.options.ResumeSubs) - } else { - c.persist.Reset() - } - - DEBUG.Println(CLI, "exit startClient") - t.flowComplete() - }() - return t -} - -// internal function used to reconnect the client when it loses its connection -func (c *client) reconnect() { - DEBUG.Println(CLI, "enter reconnect") - var ( - err error - - rc = byte(1) - sleep = time.Duration(1 * time.Second) - ) - - for rc != 0 && atomic.LoadUint32(&c.status) != disconnected { - for _, broker := range c.options.Servers { - cm := newConnectMsgFromOptions(&c.options, broker) - DEBUG.Println(CLI, "about to write new connect msg") - c.Lock() - c.conn, err = openConnection(broker, c.options.TLSConfig, c.options.ConnectTimeout, c.options.HTTPHeaders) - c.Unlock() - if err == nil { - DEBUG.Println(CLI, "socket connected to broker") - switch c.options.ProtocolVersion { - case 0x83: - DEBUG.Println(CLI, "Using MQTT 3.1b protocol") - cm.ProtocolName = "MQIsdp" - cm.ProtocolVersion = 0x83 - case 0x84: - DEBUG.Println(CLI, "Using MQTT 3.1.1b protocol") - cm.ProtocolName = "MQTT" - cm.ProtocolVersion = 0x84 - case 3: - DEBUG.Println(CLI, "Using MQTT 3.1 protocol") - cm.ProtocolName = "MQIsdp" - cm.ProtocolVersion = 3 - default: - DEBUG.Println(CLI, "Using MQTT 3.1.1 protocol") - cm.ProtocolName = "MQTT" - cm.ProtocolVersion = 4 - } - cm.Write(c.conn) - - rc, _ = c.connect() - if rc != packets.Accepted { - c.conn.Close() - c.conn = nil - //if the protocol version was explicitly set don't do any fallback - if c.options.protocolVersionExplicit { - ERROR.Println(CLI, "Connecting to", broker, "CONNACK was not Accepted, but rather", packets.ConnackReturnCodes[rc]) - continue - } - } - break - } else { - ERROR.Println(CLI, err.Error()) - WARN.Println(CLI, "failed to connect to broker, trying next") - rc = packets.ErrNetworkError - } - } - if rc != 0 { - DEBUG.Println(CLI, "Reconnect failed, sleeping for", int(sleep.Seconds()), "seconds") - time.Sleep(sleep) - if sleep < c.options.MaxReconnectInterval { - sleep *= 2 - } - - if sleep > c.options.MaxReconnectInterval { - sleep = c.options.MaxReconnectInterval - } - } - } - // Disconnect() must have been called while we were trying to reconnect. 
- if c.connectionStatus() == disconnected { - DEBUG.Println(CLI, "Client moved to disconnected state while reconnecting, abandoning reconnect") - return - } - - c.stop = make(chan struct{}) - - if c.options.KeepAlive != 0 { - atomic.StoreInt32(&c.pingOutstanding, 0) - c.lastReceived.Store(time.Now()) - c.lastSent.Store(time.Now()) - c.workers.Add(1) - go keepalive(c) - } - - c.setConnected(connected) - DEBUG.Println(CLI, "client is reconnected") - if c.options.OnConnect != nil { - go c.options.OnConnect(c) - } - - c.workers.Add(4) - go errorWatch(c) - go alllogic(c) - go outgoing(c) - go incoming(c) - - c.resume(false) -} - -// This function is only used for receiving a connack -// when the connection is first started. -// This prevents receiving incoming data while resume -// is in progress if clean session is false. -func (c *client) connect() (byte, bool) { - DEBUG.Println(NET, "connect started") - - ca, err := packets.ReadPacket(c.conn) - if err != nil { - ERROR.Println(NET, "connect got error", err) - return packets.ErrNetworkError, false - } - if ca == nil { - ERROR.Println(NET, "received nil packet") - return packets.ErrNetworkError, false - } - - msg, ok := ca.(*packets.ConnackPacket) - if !ok { - ERROR.Println(NET, "received msg that was not CONNACK") - return packets.ErrNetworkError, false - } - - DEBUG.Println(NET, "received connack") - return msg.ReturnCode, msg.SessionPresent -} - -// Disconnect will end the connection with the server, but not before waiting -// the specified number of milliseconds to wait for existing work to be -// completed. -func (c *client) Disconnect(quiesce uint) { - status := atomic.LoadUint32(&c.status) - if status == connected { - DEBUG.Println(CLI, "disconnecting") - c.setConnected(disconnected) - - dm := packets.NewControlPacket(packets.Disconnect).(*packets.DisconnectPacket) - dt := newToken(packets.Disconnect) - c.oboundP <- &PacketAndToken{p: dm, t: dt} - - // wait for work to finish, or quiesce time consumed - dt.WaitTimeout(time.Duration(quiesce) * time.Millisecond) - } else { - WARN.Println(CLI, "Disconnect() called but not connected (disconnected/reconnecting)") - c.setConnected(disconnected) - } - - c.disconnect() -} - -// ForceDisconnect will end the connection with the mqtt broker immediately. 
-func (c *client) forceDisconnect() { - if !c.IsConnected() { - WARN.Println(CLI, "already disconnected") - return - } - c.setConnected(disconnected) - c.conn.Close() - DEBUG.Println(CLI, "forcefully disconnecting") - c.disconnect() -} - -func (c *client) internalConnLost(err error) { - // Only do anything if this was called and we are still "connected" - // forceDisconnect can cause incoming/outgoing/alllogic to end with - // error from closing the socket but state will be "disconnected" - if c.IsConnected() { - c.closeStop() - c.conn.Close() - c.workers.Wait() - if c.options.CleanSession && !c.options.AutoReconnect { - c.messageIds.cleanUp() - } - if c.options.AutoReconnect { - c.setConnected(reconnecting) - go c.reconnect() - } else { - c.setConnected(disconnected) - } - if c.options.OnConnectionLost != nil { - go c.options.OnConnectionLost(c, err) - } - } -} - -func (c *client) closeStop() { - c.Lock() - defer c.Unlock() - select { - case <-c.stop: - DEBUG.Println("In disconnect and stop channel is already closed") - default: - if c.stop != nil { - close(c.stop) - } - } -} - -func (c *client) closeStopRouter() { - c.Lock() - defer c.Unlock() - select { - case <-c.stopRouter: - DEBUG.Println("In disconnect and stop channel is already closed") - default: - if c.stopRouter != nil { - close(c.stopRouter) - } - } -} - -func (c *client) closeConn() { - c.Lock() - defer c.Unlock() - if c.conn != nil { - c.conn.Close() - } -} - -func (c *client) disconnect() { - c.closeStop() - c.closeConn() - c.workers.Wait() - c.messageIds.cleanUp() - c.closeStopRouter() - DEBUG.Println(CLI, "disconnected") - c.persist.Close() -} - -// Publish will publish a message with the specified QoS and content -// to the specified topic. -// Returns a token to track delivery of the message to the broker -func (c *client) Publish(topic string, qos byte, retained bool, payload interface{}) Token { - token := newToken(packets.Publish).(*PublishToken) - DEBUG.Println(CLI, "enter Publish") - switch { - case !c.IsConnected(): - token.setError(ErrNotConnected) - return token - case c.connectionStatus() == reconnecting && qos == 0: - token.flowComplete() - return token - } - pub := packets.NewControlPacket(packets.Publish).(*packets.PublishPacket) - pub.Qos = qos - pub.TopicName = topic - pub.Retain = retained - switch payload.(type) { - case string: - pub.Payload = []byte(payload.(string)) - case []byte: - pub.Payload = payload.([]byte) - default: - token.setError(fmt.Errorf("Unknown payload type")) - return token - } - - if pub.Qos != 0 && pub.MessageID == 0 { - pub.MessageID = c.getID(token) - token.messageID = pub.MessageID - } - persistOutbound(c.persist, pub) - if c.connectionStatus() == reconnecting { - DEBUG.Println(CLI, "storing publish message (reconnecting), topic:", topic) - } else { - DEBUG.Println(CLI, "sending publish message, topic:", topic) - c.obound <- &PacketAndToken{p: pub, t: token} - } - return token -} - -// Subscribe starts a new subscription. Provide a MessageHandler to be executed when -// a message is published on the topic provided. 
-func (c *client) Subscribe(topic string, qos byte, callback MessageHandler) Token { - token := newToken(packets.Subscribe).(*SubscribeToken) - DEBUG.Println(CLI, "enter Subscribe") - if !c.IsConnected() { - token.setError(ErrNotConnected) - return token - } - sub := packets.NewControlPacket(packets.Subscribe).(*packets.SubscribePacket) - if err := validateTopicAndQos(topic, qos); err != nil { - token.setError(err) - return token - } - sub.Topics = append(sub.Topics, topic) - sub.Qoss = append(sub.Qoss, qos) - DEBUG.Println(CLI, sub.String()) - - if strings.HasPrefix(topic, "$share") { - topic = strings.Join(strings.Split(topic, "/")[2:], "/") - } - - if callback != nil { - c.msgRouter.addRoute(topic, callback) - } - - token.subs = append(token.subs, topic) - c.oboundP <- &PacketAndToken{p: sub, t: token} - DEBUG.Println(CLI, "exit Subscribe") - return token -} - -// SubscribeMultiple starts a new subscription for multiple topics. Provide a MessageHandler to -// be executed when a message is published on one of the topics provided. -func (c *client) SubscribeMultiple(filters map[string]byte, callback MessageHandler) Token { - var err error - token := newToken(packets.Subscribe).(*SubscribeToken) - DEBUG.Println(CLI, "enter SubscribeMultiple") - if !c.IsConnected() { - token.setError(ErrNotConnected) - return token - } - sub := packets.NewControlPacket(packets.Subscribe).(*packets.SubscribePacket) - if sub.Topics, sub.Qoss, err = validateSubscribeMap(filters); err != nil { - token.setError(err) - return token - } - - if callback != nil { - for topic := range filters { - c.msgRouter.addRoute(topic, callback) - } - } - token.subs = make([]string, len(sub.Topics)) - copy(token.subs, sub.Topics) - c.oboundP <- &PacketAndToken{p: sub, t: token} - DEBUG.Println(CLI, "exit SubscribeMultiple") - return token -} - -// Load all stored messages and resend them -// Call this to ensure QOS > 1,2 even after an application crash -func (c *client) resume(subscription bool) { - - storedKeys := c.persist.All() - for _, key := range storedKeys { - packet := c.persist.Get(key) - if packet == nil { - continue - } - details := packet.Details() - if isKeyOutbound(key) { - switch packet.(type) { - case *packets.SubscribePacket: - if subscription { - DEBUG.Println(STR, fmt.Sprintf("loaded pending subscribe (%d)", details.MessageID)) - token := newToken(packets.Subscribe).(*SubscribeToken) - c.oboundP <- &PacketAndToken{p: packet, t: token} - } - case *packets.UnsubscribePacket: - if subscription { - DEBUG.Println(STR, fmt.Sprintf("loaded pending unsubscribe (%d)", details.MessageID)) - token := newToken(packets.Unsubscribe).(*UnsubscribeToken) - c.oboundP <- &PacketAndToken{p: packet, t: token} - } - case *packets.PubrelPacket: - DEBUG.Println(STR, fmt.Sprintf("loaded pending pubrel (%d)", details.MessageID)) - select { - case c.oboundP <- &PacketAndToken{p: packet, t: nil}: - case <-c.stop: - } - case *packets.PublishPacket: - token := newToken(packets.Publish).(*PublishToken) - token.messageID = details.MessageID - c.claimID(token, details.MessageID) - DEBUG.Println(STR, fmt.Sprintf("loaded pending publish (%d)", details.MessageID)) - DEBUG.Println(STR, details) - c.obound <- &PacketAndToken{p: packet, t: token} - default: - ERROR.Println(STR, "invalid message type in store (discarded)") - c.persist.Del(key) - } - } else { - switch packet.(type) { - case *packets.PubrelPacket, *packets.PublishPacket: - DEBUG.Println(STR, fmt.Sprintf("loaded pending incomming (%d)", details.MessageID)) - select { - case c.ibound 
<- packet: - case <-c.stop: - } - default: - ERROR.Println(STR, "invalid message type in store (discarded)") - c.persist.Del(key) - } - } - } -} - -// Unsubscribe will end the subscription from each of the topics provided. -// Messages published to those topics from other clients will no longer be -// received. -func (c *client) Unsubscribe(topics ...string) Token { - token := newToken(packets.Unsubscribe).(*UnsubscribeToken) - DEBUG.Println(CLI, "enter Unsubscribe") - if !c.IsConnected() { - token.setError(ErrNotConnected) - return token - } - unsub := packets.NewControlPacket(packets.Unsubscribe).(*packets.UnsubscribePacket) - unsub.Topics = make([]string, len(topics)) - copy(unsub.Topics, topics) - - c.oboundP <- &PacketAndToken{p: unsub, t: token} - for _, topic := range topics { - c.msgRouter.deleteRoute(topic) - } - - DEBUG.Println(CLI, "exit Unsubscribe") - return token -} - -// OptionsReader returns a ClientOptionsReader which is a copy of the clientoptions -// in use by the client. -func (c *client) OptionsReader() ClientOptionsReader { - r := ClientOptionsReader{options: &c.options} - return r -} - -//DefaultConnectionLostHandler is a definition of a function that simply -//reports to the DEBUG log the reason for the client losing a connection. -func DefaultConnectionLostHandler(client Client, reason error) { - DEBUG.Println("Connection lost:", reason.Error()) -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/components.go b/vendor/github.com/eclipse/paho.mqtt.golang/components.go deleted file mode 100644 index 01f5fafdf8f..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/components.go +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -type component string - -// Component names for debug output -const ( - NET component = "[net] " - PNG component = "[pinger] " - CLI component = "[client] " - DEC component = "[decode] " - MES component = "[message] " - STR component = "[store] " - MID component = "[msgids] " - TST component = "[test] " - STA component = "[state] " - ERR component = "[error] " -) diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10 b/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10 deleted file mode 100644 index cf989f1456b..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10 +++ /dev/null @@ -1,15 +0,0 @@ - -Eclipse Distribution License - v 1.0 - -Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/epl-v10 b/vendor/github.com/eclipse/paho.mqtt.golang/epl-v10 deleted file mode 100644 index 79e486c3d2c..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/epl-v10 +++ /dev/null @@ -1,70 +0,0 @@ -Eclipse Public License - v 1.0 - -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and -b) in the case of each subsequent Contributor: -i) changes to the Program, and -ii) additions to the Program; -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. 
-c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and -b) its license agreement: -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and -b) a copy of this Agreement must be included with each copy of the Program. -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. 
The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. 
If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/filestore.go b/vendor/github.com/eclipse/paho.mqtt.golang/filestore.go deleted file mode 100644 index c4a0d36b534..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/filestore.go +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "io/ioutil" - "os" - "path" - "sort" - "sync" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -const ( - msgExt = ".msg" - tmpExt = ".tmp" - corruptExt = ".CORRUPT" -) - -// FileStore implements the store interface using the filesystem to provide -// true persistence, even across client failure. This is designed to use a -// single directory per running client. If you are running multiple clients -// on the same filesystem, you will need to be careful to specify unique -// store directories for each. -type FileStore struct { - sync.RWMutex - directory string - opened bool -} - -// NewFileStore will create a new FileStore which stores its messages in the -// directory provided. -func NewFileStore(directory string) *FileStore { - store := &FileStore{ - directory: directory, - opened: false, - } - return store -} - -// Open will allow the FileStore to be used. 
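FileStore is what lets the resume() logic removed earlier in this diff survive a process restart: with CleanSession disabled and a file-backed store, pending QoS 1/2 packets are reloaded and resent after reconnect. A minimal sketch of wiring it up; the broker address, client ID, and store directory are illustrative.

    package mqttexample

    import (
        mqtt "github.com/eclipse/paho.mqtt.golang"
    )

    // durableOptions builds options for a client whose pending QoS 1/2
    // publishes survive a restart: clean session off plus a file-backed
    // store for the persistence layer to read from on reconnect.
    func durableOptions(storeDir string) *mqtt.ClientOptions {
        return mqtt.NewClientOptions().
            AddBroker("tcp://127.0.0.1:1883"). // placeholder broker address
            SetClientID("durable-client").     // placeholder client ID
            SetCleanSession(false).
            SetStore(mqtt.NewFileStore(storeDir))
    }

Each client instance should get its own store directory, as the comment on the FileStore type above points out.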
-func (store *FileStore) Open() { - store.Lock() - defer store.Unlock() - // if no store directory was specified in ClientOpts, by default use the - // current working directory - if store.directory == "" { - store.directory, _ = os.Getwd() - } - - // if store dir exists, great, otherwise, create it - if !exists(store.directory) { - perms := os.FileMode(0770) - merr := os.MkdirAll(store.directory, perms) - chkerr(merr) - } - store.opened = true - DEBUG.Println(STR, "store is opened at", store.directory) -} - -// Close will disallow the FileStore from being used. -func (store *FileStore) Close() { - store.Lock() - defer store.Unlock() - store.opened = false - DEBUG.Println(STR, "store is closed") -} - -// Put will put a message into the store, associated with the provided -// key value. -func (store *FileStore) Put(key string, m packets.ControlPacket) { - store.Lock() - defer store.Unlock() - if !store.opened { - ERROR.Println(STR, "Trying to use file store, but not open") - return - } - full := fullpath(store.directory, key) - write(store.directory, key, m) - if !exists(full) { - ERROR.Println(STR, "file not created:", full) - } -} - -// Get will retrieve a message from the store, the one associated with -// the provided key value. -func (store *FileStore) Get(key string) packets.ControlPacket { - store.RLock() - defer store.RUnlock() - if !store.opened { - ERROR.Println(STR, "Trying to use file store, but not open") - return nil - } - filepath := fullpath(store.directory, key) - if !exists(filepath) { - return nil - } - mfile, oerr := os.Open(filepath) - chkerr(oerr) - msg, rerr := packets.ReadPacket(mfile) - chkerr(mfile.Close()) - - // Message was unreadable, return nil - if rerr != nil { - newpath := corruptpath(store.directory, key) - WARN.Println(STR, "corrupted file detected:", rerr.Error(), "archived at:", newpath) - os.Rename(filepath, newpath) - return nil - } - return msg -} - -// All will provide a list of all of the keys associated with messages -// currenly residing in the FileStore. -func (store *FileStore) All() []string { - store.RLock() - defer store.RUnlock() - return store.all() -} - -// Del will remove the persisted message associated with the provided -// key from the FileStore. -func (store *FileStore) Del(key string) { - store.Lock() - defer store.Unlock() - store.del(key) -} - -// Reset will remove all persisted messages from the FileStore. 
-func (store *FileStore) Reset() { - store.Lock() - defer store.Unlock() - WARN.Println(STR, "FileStore Reset") - for _, key := range store.all() { - store.del(key) - } -} - -// lockless -func (store *FileStore) all() []string { - var err error - var keys []string - var files fileInfos - - if !store.opened { - ERROR.Println(STR, "Trying to use file store, but not open") - return nil - } - - files, err = ioutil.ReadDir(store.directory) - chkerr(err) - sort.Sort(files) - for _, f := range files { - DEBUG.Println(STR, "file in All():", f.Name()) - name := f.Name() - if name[len(name)-4:len(name)] != msgExt { - DEBUG.Println(STR, "skipping file, doesn't have right extension: ", name) - continue - } - key := name[0 : len(name)-4] // remove file extension - keys = append(keys, key) - } - return keys -} - -// lockless -func (store *FileStore) del(key string) { - if !store.opened { - ERROR.Println(STR, "Trying to use file store, but not open") - return - } - DEBUG.Println(STR, "store del filepath:", store.directory) - DEBUG.Println(STR, "store delete key:", key) - filepath := fullpath(store.directory, key) - DEBUG.Println(STR, "path of deletion:", filepath) - if !exists(filepath) { - WARN.Println(STR, "store could not delete key:", key) - return - } - rerr := os.Remove(filepath) - chkerr(rerr) - DEBUG.Println(STR, "del msg:", key) - if exists(filepath) { - ERROR.Println(STR, "file not deleted:", filepath) - } -} - -func fullpath(store string, key string) string { - p := path.Join(store, key+msgExt) - return p -} - -func tmppath(store string, key string) string { - p := path.Join(store, key+tmpExt) - return p -} - -func corruptpath(store string, key string) string { - p := path.Join(store, key+corruptExt) - return p -} - -// create file called "X.[messageid].tmp" located in the store -// the contents of the file is the bytes of the message, then -// rename it to "X.[messageid].msg", overwriting any existing -// message with the same id -// X will be 'i' for inbound messages, and O for outbound messages -func write(store, key string, m packets.ControlPacket) { - temppath := tmppath(store, key) - f, err := os.Create(temppath) - chkerr(err) - werr := m.Write(f) - chkerr(werr) - cerr := f.Close() - chkerr(cerr) - rerr := os.Rename(temppath, fullpath(store, key)) - chkerr(rerr) -} - -func exists(file string) bool { - if _, err := os.Stat(file); err != nil { - if os.IsNotExist(err) { - return false - } - chkerr(err) - } - return true -} - -type fileInfos []os.FileInfo - -func (f fileInfos) Len() int { - return len(f) -} - -func (f fileInfos) Swap(i, j int) { - f[i], f[j] = f[j], f[i] -} - -func (f fileInfos) Less(i, j int) bool { - return f[i].ModTime().Before(f[j].ModTime()) -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/memstore.go b/vendor/github.com/eclipse/paho.mqtt.golang/memstore.go deleted file mode 100644 index 499c490bdbb..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/memstore.go +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "sync" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -// MemoryStore implements the store interface to provide a "persistence" -// mechanism wholly stored in memory. This is only useful for -// as long as the client instance exists. -type MemoryStore struct { - sync.RWMutex - messages map[string]packets.ControlPacket - opened bool -} - -// NewMemoryStore returns a pointer to a new instance of -// MemoryStore, the instance is not initialized and ready to -// use until Open() has been called on it. -func NewMemoryStore() *MemoryStore { - store := &MemoryStore{ - messages: make(map[string]packets.ControlPacket), - opened: false, - } - return store -} - -// Open initializes a MemoryStore instance. -func (store *MemoryStore) Open() { - store.Lock() - defer store.Unlock() - store.opened = true - DEBUG.Println(STR, "memorystore initialized") -} - -// Put takes a key and a pointer to a Message and stores the -// message. -func (store *MemoryStore) Put(key string, message packets.ControlPacket) { - store.Lock() - defer store.Unlock() - if !store.opened { - ERROR.Println(STR, "Trying to use memory store, but not open") - return - } - store.messages[key] = message -} - -// Get takes a key and looks in the store for a matching Message -// returning either the Message pointer or nil. -func (store *MemoryStore) Get(key string) packets.ControlPacket { - store.RLock() - defer store.RUnlock() - if !store.opened { - ERROR.Println(STR, "Trying to use memory store, but not open") - return nil - } - mid := mIDFromKey(key) - m := store.messages[key] - if m == nil { - CRITICAL.Println(STR, "memorystore get: message", mid, "not found") - } else { - DEBUG.Println(STR, "memorystore get: message", mid, "found") - } - return m -} - -// All returns a slice of strings containing all the keys currently -// in the MemoryStore. -func (store *MemoryStore) All() []string { - store.RLock() - defer store.RUnlock() - if !store.opened { - ERROR.Println(STR, "Trying to use memory store, but not open") - return nil - } - keys := []string{} - for k := range store.messages { - keys = append(keys, k) - } - return keys -} - -// Del takes a key, searches the MemoryStore and if the key is found -// deletes the Message pointer associated with it. -func (store *MemoryStore) Del(key string) { - store.Lock() - defer store.Unlock() - if !store.opened { - ERROR.Println(STR, "Trying to use memory store, but not open") - return - } - mid := mIDFromKey(key) - m := store.messages[key] - if m == nil { - WARN.Println(STR, "memorystore del: message", mid, "not found") - } else { - delete(store.messages, key) - DEBUG.Println(STR, "memorystore del: message", mid, "was deleted") - } -} - -// Close will disallow modifications to the state of the store. -func (store *MemoryStore) Close() { - store.Lock() - defer store.Unlock() - if !store.opened { - ERROR.Println(STR, "Trying to close memory store, but not open") - return - } - store.opened = false - DEBUG.Println(STR, "memorystore closed") -} - -// Reset eliminates all persisted message data in the store. 
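MemoryStore is what the client falls back to when no SetStore option is supplied, so persisted packets live only as long as the process. The short sketch below exercises the same Put/All contract that FileStore implements above; the packet fields and the "o.42" key are illustrative (the client itself derives keys from message IDs, and the "o." outbound prefix is an assumption about its key scheme).

    package main

    import (
        "fmt"

        mqtt "github.com/eclipse/paho.mqtt.golang"
        "github.com/eclipse/paho.mqtt.golang/packets"
    )

    func main() {
        store := mqtt.NewMemoryStore()
        store.Open()

        // Build a publish packet to persist; values are illustrative.
        pub := packets.NewControlPacket(packets.Publish).(*packets.PublishPacket)
        pub.TopicName = "sensors/temperature"
        pub.Qos = 1
        pub.MessageID = 42
        pub.Payload = []byte("21.5")

        store.Put("o.42", pub)   // assumed outbound key format
        fmt.Println(store.All()) // expected: [o.42]
        store.Close()
    }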
-func (store *MemoryStore) Reset() { - store.Lock() - defer store.Unlock() - if !store.opened { - ERROR.Println(STR, "Trying to reset memory store, but not open") - } - store.messages = make(map[string]packets.ControlPacket) - WARN.Println(STR, "memorystore wiped") -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/message.go b/vendor/github.com/eclipse/paho.mqtt.golang/message.go deleted file mode 100644 index 903e5dcf5e7..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/message.go +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "net/url" - - "github.com/eclipse/paho.mqtt.golang/packets" - "sync" -) - -// Message defines the externals that a message implementation must support -// these are received messages that are passed to the callbacks, not internal -// messages -type Message interface { - Duplicate() bool - Qos() byte - Retained() bool - Topic() string - MessageID() uint16 - Payload() []byte - Ack() -} - -type message struct { - duplicate bool - qos byte - retained bool - topic string - messageID uint16 - payload []byte - once sync.Once - ack func() -} - -func (m *message) Duplicate() bool { - return m.duplicate -} - -func (m *message) Qos() byte { - return m.qos -} - -func (m *message) Retained() bool { - return m.retained -} - -func (m *message) Topic() string { - return m.topic -} - -func (m *message) MessageID() uint16 { - return m.messageID -} - -func (m *message) Payload() []byte { - return m.payload -} - -func (m *message) Ack() { - m.once.Do(m.ack) -} - -func messageFromPublish(p *packets.PublishPacket, ack func()) Message { - return &message{ - duplicate: p.Dup, - qos: p.Qos, - retained: p.Retain, - topic: p.TopicName, - messageID: p.MessageID, - payload: p.Payload, - ack: ack, - } -} - -func newConnectMsgFromOptions(options *ClientOptions, broker *url.URL) *packets.ConnectPacket { - m := packets.NewControlPacket(packets.Connect).(*packets.ConnectPacket) - - m.CleanSession = options.CleanSession - m.WillFlag = options.WillEnabled - m.WillRetain = options.WillRetained - m.ClientIdentifier = options.ClientID - - if options.WillEnabled { - m.WillQos = options.WillQos - m.WillTopic = options.WillTopic - m.WillMessage = options.WillPayload - } - - username := options.Username - password := options.Password - if broker.User != nil { - username = broker.User.Username() - if pwd, ok := broker.User.Password(); ok { - password = pwd - } - } - if options.CredentialsProvider != nil { - username, password = options.CredentialsProvider() - } - - if username != "" { - m.UsernameFlag = true - m.Username = username - //mustn't have password without user as well - if password != "" { - m.PasswordFlag = true - m.Password = []byte(password) - } - } - - m.Keepalive = uint16(options.KeepAlive) - - return m -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/messageids.go b/vendor/github.com/eclipse/paho.mqtt.golang/messageids.go deleted file mode 100644 index 9a5fa9fd159..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/messageids.go +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "fmt" - "sync" - "time" -) - -// MId is 16 bit message id as specified by the MQTT spec. -// In general, these values should not be depended upon by -// the client application. -type MId uint16 - -type messageIds struct { - sync.RWMutex - index map[uint16]tokenCompletor -} - -const ( - midMin uint16 = 1 - midMax uint16 = 65535 -) - -func (mids *messageIds) cleanUp() { - mids.Lock() - for _, token := range mids.index { - switch token.(type) { - case *PublishToken: - token.setError(fmt.Errorf("Connection lost before Publish completed")) - case *SubscribeToken: - token.setError(fmt.Errorf("Connection lost before Subscribe completed")) - case *UnsubscribeToken: - token.setError(fmt.Errorf("Connection lost before Unsubscribe completed")) - case nil: - continue - } - token.flowComplete() - } - mids.index = make(map[uint16]tokenCompletor) - mids.Unlock() - DEBUG.Println(MID, "cleaned up") -} - -func (mids *messageIds) freeID(id uint16) { - mids.Lock() - delete(mids.index, id) - mids.Unlock() -} - -func (mids *messageIds) claimID(token tokenCompletor, id uint16) { - mids.Lock() - defer mids.Unlock() - if _, ok := mids.index[id]; !ok { - mids.index[id] = token - } else { - old := mids.index[id] - old.flowComplete() - mids.index[id] = token - } -} - -func (mids *messageIds) getID(t tokenCompletor) uint16 { - mids.Lock() - defer mids.Unlock() - for i := midMin; i < midMax; i++ { - if _, ok := mids.index[i]; !ok { - mids.index[i] = t - return i - } - } - return 0 -} - -func (mids *messageIds) getToken(id uint16) tokenCompletor { - mids.RLock() - defer mids.RUnlock() - if token, ok := mids.index[id]; ok { - return token - } - return &DummyToken{id: id} -} - -type DummyToken struct { - id uint16 -} - -func (d *DummyToken) Wait() bool { - return true -} - -func (d *DummyToken) WaitTimeout(t time.Duration) bool { - return true -} - -func (d *DummyToken) flowComplete() { - ERROR.Printf("A lookup for token %d returned nil\n", d.id) -} - -func (d *DummyToken) Error() error { - return nil -} - -func (d *DummyToken) setError(e error) {} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/net.go b/vendor/github.com/eclipse/paho.mqtt.golang/net.go deleted file mode 100644 index 3e6366be719..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/net.go +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "crypto/tls" - "errors" - "fmt" - "net" - "net/http" - "net/url" - "os" - "reflect" - "sync/atomic" - "time" - - "github.com/eclipse/paho.mqtt.golang/packets" - "golang.org/x/net/proxy" - "golang.org/x/net/websocket" -) - -func signalError(c chan<- error, err error) { - select { - case c <- err: - default: - } -} - -func openConnection(uri *url.URL, tlsc *tls.Config, timeout time.Duration, headers http.Header) (net.Conn, error) { - switch uri.Scheme { - case "ws": - config, _ := websocket.NewConfig(uri.String(), fmt.Sprintf("http://%s", uri.Host)) - config.Protocol = []string{"mqtt"} - config.Header = headers - config.Dialer = &net.Dialer{Timeout: timeout} - conn, err := websocket.DialConfig(config) - if err != nil { - return nil, err - } - conn.PayloadType = websocket.BinaryFrame - return conn, err - case "wss": - config, _ := websocket.NewConfig(uri.String(), fmt.Sprintf("https://%s", uri.Host)) - config.Protocol = []string{"mqtt"} - config.TlsConfig = tlsc - config.Header = headers - config.Dialer = &net.Dialer{Timeout: timeout} - conn, err := websocket.DialConfig(config) - if err != nil { - return nil, err - } - conn.PayloadType = websocket.BinaryFrame - return conn, err - case "tcp": - allProxy := os.Getenv("all_proxy") - if len(allProxy) == 0 { - conn, err := net.DialTimeout("tcp", uri.Host, timeout) - if err != nil { - return nil, err - } - return conn, nil - } - proxyDialer := proxy.FromEnvironment() - - conn, err := proxyDialer.Dial("tcp", uri.Host) - if err != nil { - return nil, err - } - return conn, nil - case "unix": - conn, err := net.DialTimeout("unix", uri.Host, timeout) - if err != nil { - return nil, err - } - return conn, nil - case "ssl": - fallthrough - case "tls": - fallthrough - case "tcps": - allProxy := os.Getenv("all_proxy") - if len(allProxy) == 0 { - conn, err := tls.DialWithDialer(&net.Dialer{Timeout: timeout}, "tcp", uri.Host, tlsc) - if err != nil { - return nil, err - } - return conn, nil - } - proxyDialer := proxy.FromEnvironment() - - conn, err := proxyDialer.Dial("tcp", uri.Host) - if err != nil { - return nil, err - } - - tlsConn := tls.Client(conn, tlsc) - - err = tlsConn.Handshake() - if err != nil { - conn.Close() - return nil, err - } - - return tlsConn, nil - } - return nil, errors.New("Unknown protocol") -} - -// actually read incoming messages off the wire -// send Message object into ibound channel -func incoming(c *client) { - var err error - var cp packets.ControlPacket - - defer c.workers.Done() - - DEBUG.Println(NET, "incoming started") - - for { - if cp, err = packets.ReadPacket(c.conn); err != nil { - break - } - DEBUG.Println(NET, "Received Message") - select { - case c.ibound <- cp: - // Notify keepalive logic that we recently received a packet - if c.options.KeepAlive != 0 { - c.lastReceived.Store(time.Now()) - } - case <-c.stop: - // This avoids a deadlock should a message arrive while shutting down. - // In that case the "reader" of c.ibound might already be gone - WARN.Println(NET, "incoming dropped a received message during shutdown") - break - } - } - // We received an error on read. 
- // If disconnect is in progress, swallow error and return - select { - case <-c.stop: - DEBUG.Println(NET, "incoming stopped") - return - // Not trying to disconnect, send the error to the errors channel - default: - ERROR.Println(NET, "incoming stopped with error", err) - signalError(c.errors, err) - return - } -} - -// receive a Message object on obound, and then -// actually send outgoing message to the wire -func outgoing(c *client) { - defer c.workers.Done() - DEBUG.Println(NET, "outgoing started") - - for { - DEBUG.Println(NET, "outgoing waiting for an outbound message") - select { - case <-c.stop: - DEBUG.Println(NET, "outgoing stopped") - return - case pub := <-c.obound: - msg := pub.p.(*packets.PublishPacket) - - if c.options.WriteTimeout > 0 { - c.conn.SetWriteDeadline(time.Now().Add(c.options.WriteTimeout)) - } - - if err := msg.Write(c.conn); err != nil { - ERROR.Println(NET, "outgoing stopped with error", err) - pub.t.setError(err) - signalError(c.errors, err) - return - } - - if c.options.WriteTimeout > 0 { - // If we successfully wrote, we don't want the timeout to happen during an idle period - // so we reset it to infinite. - c.conn.SetWriteDeadline(time.Time{}) - } - - if msg.Qos == 0 { - pub.t.flowComplete() - } - DEBUG.Println(NET, "obound wrote msg, id:", msg.MessageID) - case msg := <-c.oboundP: - switch msg.p.(type) { - case *packets.SubscribePacket: - msg.p.(*packets.SubscribePacket).MessageID = c.getID(msg.t) - case *packets.UnsubscribePacket: - msg.p.(*packets.UnsubscribePacket).MessageID = c.getID(msg.t) - } - DEBUG.Println(NET, "obound priority msg to write, type", reflect.TypeOf(msg.p)) - if err := msg.p.Write(c.conn); err != nil { - ERROR.Println(NET, "outgoing stopped with error", err) - if msg.t != nil { - msg.t.setError(err) - } - signalError(c.errors, err) - return - } - switch msg.p.(type) { - case *packets.DisconnectPacket: - msg.t.(*DisconnectToken).flowComplete() - DEBUG.Println(NET, "outbound wrote disconnect, stopping") - return - } - } - // Reset ping timer after sending control packet. 
- if c.options.KeepAlive != 0 { - c.lastSent.Store(time.Now()) - } - } -} - -// receive Message objects on ibound -// store messages if necessary -// send replies on obound -// delete messages from store if necessary -func alllogic(c *client) { - defer c.workers.Done() - DEBUG.Println(NET, "logic started") - - for { - DEBUG.Println(NET, "logic waiting for msg on ibound") - - select { - case msg := <-c.ibound: - DEBUG.Println(NET, "logic got msg on ibound") - persistInbound(c.persist, msg) - switch m := msg.(type) { - case *packets.PingrespPacket: - DEBUG.Println(NET, "received pingresp") - atomic.StoreInt32(&c.pingOutstanding, 0) - case *packets.SubackPacket: - DEBUG.Println(NET, "received suback, id:", m.MessageID) - token := c.getToken(m.MessageID) - switch t := token.(type) { - case *SubscribeToken: - DEBUG.Println(NET, "granted qoss", m.ReturnCodes) - for i, qos := range m.ReturnCodes { - t.subResult[t.subs[i]] = qos - } - } - token.flowComplete() - c.freeID(m.MessageID) - case *packets.UnsubackPacket: - DEBUG.Println(NET, "received unsuback, id:", m.MessageID) - c.getToken(m.MessageID).flowComplete() - c.freeID(m.MessageID) - case *packets.PublishPacket: - DEBUG.Println(NET, "received publish, msgId:", m.MessageID) - DEBUG.Println(NET, "putting msg on onPubChan") - switch m.Qos { - case 2: - c.incomingPubChan <- m - DEBUG.Println(NET, "done putting msg on incomingPubChan") - case 1: - c.incomingPubChan <- m - DEBUG.Println(NET, "done putting msg on incomingPubChan") - case 0: - select { - case c.incomingPubChan <- m: - case <-c.stop: - } - DEBUG.Println(NET, "done putting msg on incomingPubChan") - } - case *packets.PubackPacket: - DEBUG.Println(NET, "received puback, id:", m.MessageID) - // c.receipts.get(msg.MsgId()) <- Receipt{} - // c.receipts.end(msg.MsgId()) - c.getToken(m.MessageID).flowComplete() - c.freeID(m.MessageID) - case *packets.PubrecPacket: - DEBUG.Println(NET, "received pubrec, id:", m.MessageID) - prel := packets.NewControlPacket(packets.Pubrel).(*packets.PubrelPacket) - prel.MessageID = m.MessageID - select { - case c.oboundP <- &PacketAndToken{p: prel, t: nil}: - case <-c.stop: - } - case *packets.PubrelPacket: - DEBUG.Println(NET, "received pubrel, id:", m.MessageID) - pc := packets.NewControlPacket(packets.Pubcomp).(*packets.PubcompPacket) - pc.MessageID = m.MessageID - persistOutbound(c.persist, pc) - select { - case c.oboundP <- &PacketAndToken{p: pc, t: nil}: - case <-c.stop: - } - case *packets.PubcompPacket: - DEBUG.Println(NET, "received pubcomp, id:", m.MessageID) - c.getToken(m.MessageID).flowComplete() - c.freeID(m.MessageID) - } - case <-c.stop: - WARN.Println(NET, "logic stopped") - return - } - } -} - -func (c *client) ackFunc(packet *packets.PublishPacket) func() { - return func() { - switch packet.Qos { - case 2: - pr := packets.NewControlPacket(packets.Pubrec).(*packets.PubrecPacket) - pr.MessageID = packet.MessageID - DEBUG.Println(NET, "putting pubrec msg on obound") - select { - case c.oboundP <- &PacketAndToken{p: pr, t: nil}: - case <-c.stop: - } - DEBUG.Println(NET, "done putting pubrec msg on obound") - case 1: - pa := packets.NewControlPacket(packets.Puback).(*packets.PubackPacket) - pa.MessageID = packet.MessageID - DEBUG.Println(NET, "putting puback msg on obound") - persistOutbound(c.persist, pa) - select { - case c.oboundP <- &PacketAndToken{p: pa, t: nil}: - case <-c.stop: - } - DEBUG.Println(NET, "done putting puback msg on obound") - case 0: - // do nothing, since there is no need to send an ack packet back - } - } -} - -func 
errorWatch(c *client) { - defer c.workers.Done() - select { - case <-c.stop: - WARN.Println(NET, "errorWatch stopped") - return - case err := <-c.errors: - ERROR.Println(NET, "error triggered, stopping") - go c.internalConnLost(err) - return - } -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/notice.html b/vendor/github.com/eclipse/paho.mqtt.golang/notice.html deleted file mode 100644 index f19c483b9c8..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/notice.html +++ /dev/null @@ -1,108 +0,0 @@ - - - - - -Eclipse Foundation Software User Agreement - - - -

-Eclipse Foundation Software User Agreement
-February 1, 2011
-Usage Of Content
-Applicable Licenses
-Use of Provisioning Technology
-Cryptography
-[Body of the deleted notice.html reduced to its headings: the remainder is the standard Eclipse Foundation Software User Agreement text (EPL v1.0 terms for the Content, provisioning-technology conditions, an export-control note on encryption, and the statement that Java and all Java-based trademarks are trademarks of Oracle Corporation).]
- - diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/oops.go b/vendor/github.com/eclipse/paho.mqtt.golang/oops.go deleted file mode 100644 index 39630d7f28a..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/oops.go +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -func chkerr(e error) { - if e != nil { - panic(e) - } -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/options.go b/vendor/github.com/eclipse/paho.mqtt.golang/options.go deleted file mode 100644 index e96e9ed7c2b..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/options.go +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -// Portions copyright © 2018 TIBCO Software Inc. - -package mqtt - -import ( - "crypto/tls" - "net/http" - "net/url" - "strings" - "time" -) - -// CredentialsProvider allows the username and password to be updated -// before reconnecting. It should return the current username and password. -type CredentialsProvider func() (username string, password string) - -// MessageHandler is a callback type which can be set to be -// executed upon the arrival of messages published to topics -// to which the client is subscribed. -type MessageHandler func(Client, Message) - -// ConnectionLostHandler is a callback type which can be set to be -// executed upon an unintended disconnection from the MQTT broker. -// Disconnects caused by calling Disconnect or ForceDisconnect will -// not cause an OnConnectionLost callback to execute. -type ConnectionLostHandler func(Client, error) - -// OnConnectHandler is a callback that is called when the client -// state changes from unconnected/disconnected to connected. Both -// at initial connection and on reconnection -type OnConnectHandler func(Client) - -// ClientOptions contains configurable options for an Client. -type ClientOptions struct { - Servers []*url.URL - ClientID string - Username string - Password string - CredentialsProvider CredentialsProvider - CleanSession bool - Order bool - WillEnabled bool - WillTopic string - WillPayload []byte - WillQos byte - WillRetained bool - ProtocolVersion uint - protocolVersionExplicit bool - TLSConfig *tls.Config - KeepAlive int64 - PingTimeout time.Duration - ConnectTimeout time.Duration - MaxReconnectInterval time.Duration - AutoReconnect bool - Store Store - DefaultPublishHandler MessageHandler - OnConnect OnConnectHandler - OnConnectionLost ConnectionLostHandler - WriteTimeout time.Duration - MessageChannelDepth uint - ResumeSubs bool - HTTPHeaders http.Header -} - -// NewClientOptions will create a new ClientClientOptions type with some -// default values. 
-// Port: 1883 -// CleanSession: True -// Order: True -// KeepAlive: 30 (seconds) -// ConnectTimeout: 30 (seconds) -// MaxReconnectInterval 10 (minutes) -// AutoReconnect: True -func NewClientOptions() *ClientOptions { - o := &ClientOptions{ - Servers: nil, - ClientID: "", - Username: "", - Password: "", - CleanSession: true, - Order: true, - WillEnabled: false, - WillTopic: "", - WillPayload: nil, - WillQos: 0, - WillRetained: false, - ProtocolVersion: 0, - protocolVersionExplicit: false, - KeepAlive: 30, - PingTimeout: 10 * time.Second, - ConnectTimeout: 30 * time.Second, - MaxReconnectInterval: 10 * time.Minute, - AutoReconnect: true, - Store: nil, - OnConnect: nil, - OnConnectionLost: DefaultConnectionLostHandler, - WriteTimeout: 0, // 0 represents timeout disabled - MessageChannelDepth: 100, - ResumeSubs: false, - HTTPHeaders: make(map[string][]string), - } - return o -} - -// AddBroker adds a broker URI to the list of brokers to be used. The format should be -// scheme://host:port -// Where "scheme" is one of "tcp", "ssl", or "ws", "host" is the ip-address (or hostname) -// and "port" is the port on which the broker is accepting connections. -// -// Default values for hostname is "127.0.0.1", for schema is "tcp://". -// -// An example broker URI would look like: tcp://foobar.com:1883 -func (o *ClientOptions) AddBroker(server string) *ClientOptions { - if len(server) > 0 && server[0] == ':' { - server = "127.0.0.1" + server - } - if !strings.Contains(server, "://") { - server = "tcp://" + server - } - brokerURI, err := url.Parse(server) - if err != nil { - ERROR.Println(CLI, "Failed to parse %q broker address: %s", server, err) - return o - } - o.Servers = append(o.Servers, brokerURI) - return o -} - -// SetResumeSubs will enable resuming of stored (un)subscribe messages when connecting -// but not reconnecting if CleanSession is false. Otherwise these messages are discarded. -func (o *ClientOptions) SetResumeSubs(resume bool) *ClientOptions { - o.ResumeSubs = resume - return o -} - -// SetClientID will set the client id to be used by this client when -// connecting to the MQTT broker. According to the MQTT v3.1 specification, -// a client id mus be no longer than 23 characters. -func (o *ClientOptions) SetClientID(id string) *ClientOptions { - o.ClientID = id - return o -} - -// SetUsername will set the username to be used by this client when connecting -// to the MQTT broker. Note: without the use of SSL/TLS, this information will -// be sent in plaintext accross the wire. -func (o *ClientOptions) SetUsername(u string) *ClientOptions { - o.Username = u - return o -} - -// SetPassword will set the password to be used by this client when connecting -// to the MQTT broker. Note: without the use of SSL/TLS, this information will -// be sent in plaintext accross the wire. -func (o *ClientOptions) SetPassword(p string) *ClientOptions { - o.Password = p - return o -} - -// SetCredentialsProvider will set a method to be called by this client when -// connecting to the MQTT broker that provide the current username and password. -// Note: without the use of SSL/TLS, this information will be sent -// in plaintext accross the wire. -func (o *ClientOptions) SetCredentialsProvider(p CredentialsProvider) *ClientOptions { - o.CredentialsProvider = p - return o -} - -// SetCleanSession will set the "clean session" flag in the connect message -// when this client connects to an MQTT broker. 
By setting this flag, you are -// indicating that no messages saved by the broker for this client should be -// delivered. Any messages that were going to be sent by this client before -// diconnecting previously but didn't will not be sent upon connecting to the -// broker. -func (o *ClientOptions) SetCleanSession(clean bool) *ClientOptions { - o.CleanSession = clean - return o -} - -// SetOrderMatters will set the message routing to guarantee order within -// each QoS level. By default, this value is true. If set to false, -// this flag indicates that messages can be delivered asynchronously -// from the client to the application and possibly arrive out of order. -func (o *ClientOptions) SetOrderMatters(order bool) *ClientOptions { - o.Order = order - return o -} - -// SetTLSConfig will set an SSL/TLS configuration to be used when connecting -// to an MQTT broker. Please read the official Go documentation for more -// information. -func (o *ClientOptions) SetTLSConfig(t *tls.Config) *ClientOptions { - o.TLSConfig = t - return o -} - -// SetStore will set the implementation of the Store interface -// used to provide message persistence in cases where QoS levels -// QoS_ONE or QoS_TWO are used. If no store is provided, then the -// client will use MemoryStore by default. -func (o *ClientOptions) SetStore(s Store) *ClientOptions { - o.Store = s - return o -} - -// SetKeepAlive will set the amount of time (in seconds) that the client -// should wait before sending a PING request to the broker. This will -// allow the client to know that a connection has not been lost with the -// server. -func (o *ClientOptions) SetKeepAlive(k time.Duration) *ClientOptions { - o.KeepAlive = int64(k / time.Second) - return o -} - -// SetPingTimeout will set the amount of time (in seconds) that the client -// will wait after sending a PING request to the broker, before deciding -// that the connection has been lost. Default is 10 seconds. -func (o *ClientOptions) SetPingTimeout(k time.Duration) *ClientOptions { - o.PingTimeout = k - return o -} - -// SetProtocolVersion sets the MQTT version to be used to connect to the -// broker. Legitimate values are currently 3 - MQTT 3.1 or 4 - MQTT 3.1.1 -func (o *ClientOptions) SetProtocolVersion(pv uint) *ClientOptions { - if (pv >= 3 && pv <= 4) || (pv > 0x80) { - o.ProtocolVersion = pv - o.protocolVersionExplicit = true - } - return o -} - -// UnsetWill will cause any set will message to be disregarded. -func (o *ClientOptions) UnsetWill() *ClientOptions { - o.WillEnabled = false - return o -} - -// SetWill accepts a string will message to be set. When the client connects, -// it will give this will message to the broker, which will then publish the -// provided payload (the will) to any clients that are subscribed to the provided -// topic. -func (o *ClientOptions) SetWill(topic string, payload string, qos byte, retained bool) *ClientOptions { - o.SetBinaryWill(topic, []byte(payload), qos, retained) - return o -} - -// SetBinaryWill accepts a []byte will message to be set. When the client connects, -// it will give this will message to the broker, which will then publish the -// provided payload (the will) to any clients that are subscribed to the provided -// topic. 
-func (o *ClientOptions) SetBinaryWill(topic string, payload []byte, qos byte, retained bool) *ClientOptions { - o.WillEnabled = true - o.WillTopic = topic - o.WillPayload = payload - o.WillQos = qos - o.WillRetained = retained - return o -} - -// SetDefaultPublishHandler sets the MessageHandler that will be called when a message -// is received that does not match any known subscriptions. -func (o *ClientOptions) SetDefaultPublishHandler(defaultHandler MessageHandler) *ClientOptions { - o.DefaultPublishHandler = defaultHandler - return o -} - -// SetOnConnectHandler sets the function to be called when the client is connected. Both -// at initial connection time and upon automatic reconnect. -func (o *ClientOptions) SetOnConnectHandler(onConn OnConnectHandler) *ClientOptions { - o.OnConnect = onConn - return o -} - -// SetConnectionLostHandler will set the OnConnectionLost callback to be executed -// in the case where the client unexpectedly loses connection with the MQTT broker. -func (o *ClientOptions) SetConnectionLostHandler(onLost ConnectionLostHandler) *ClientOptions { - o.OnConnectionLost = onLost - return o -} - -// SetWriteTimeout puts a limit on how long a mqtt publish should block until it unblocks with a -// timeout error. A duration of 0 never times out. Default 30 seconds -func (o *ClientOptions) SetWriteTimeout(t time.Duration) *ClientOptions { - o.WriteTimeout = t - return o -} - -// SetConnectTimeout limits how long the client will wait when trying to open a connection -// to an MQTT server before timeing out and erroring the attempt. A duration of 0 never times out. -// Default 30 seconds. Currently only operational on TCP/TLS connections. -func (o *ClientOptions) SetConnectTimeout(t time.Duration) *ClientOptions { - o.ConnectTimeout = t - return o -} - -// SetMaxReconnectInterval sets the maximum time that will be waited between reconnection attempts -// when connection is lost -func (o *ClientOptions) SetMaxReconnectInterval(t time.Duration) *ClientOptions { - o.MaxReconnectInterval = t - return o -} - -// SetAutoReconnect sets whether the automatic reconnection logic should be used -// when the connection is lost, even if disabled the ConnectionLostHandler is still -// called -func (o *ClientOptions) SetAutoReconnect(a bool) *ClientOptions { - o.AutoReconnect = a - return o -} - -// SetMessageChannelDepth sets the size of the internal queue that holds messages while the -// client is temporairily offline, allowing the application to publish when the client is -// reconnecting. This setting is only valid if AutoReconnect is set to true, it is otherwise -// ignored. -func (o *ClientOptions) SetMessageChannelDepth(s uint) *ClientOptions { - o.MessageChannelDepth = s - return o -} - -// SetHTTPHeaders sets the additional HTTP headers that will be sent in the WebSocket -// opening handshake. -func (o *ClientOptions) SetHTTPHeaders(h http.Header) *ClientOptions { - o.HTTPHeaders = h - return o -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/options_reader.go b/vendor/github.com/eclipse/paho.mqtt.golang/options_reader.go deleted file mode 100644 index 60144b93c86..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/options_reader.go +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "crypto/tls" - "net/http" - "net/url" - "time" -) - -// ClientOptionsReader provides an interface for reading ClientOptions after the client has been initialized. -type ClientOptionsReader struct { - options *ClientOptions -} - -//Servers returns a slice of the servers defined in the clientoptions -func (r *ClientOptionsReader) Servers() []*url.URL { - s := make([]*url.URL, len(r.options.Servers)) - - for i, u := range r.options.Servers { - nu := *u - s[i] = &nu - } - - return s -} - -//ResumeSubs returns true if resuming stored (un)sub is enabled -func (r *ClientOptionsReader) ResumeSubs() bool { - s := r.options.ResumeSubs - return s -} - -//ClientID returns the set client id -func (r *ClientOptionsReader) ClientID() string { - s := r.options.ClientID - return s -} - -//Username returns the set username -func (r *ClientOptionsReader) Username() string { - s := r.options.Username - return s -} - -//Password returns the set password -func (r *ClientOptionsReader) Password() string { - s := r.options.Password - return s -} - -//CleanSession returns whether Cleansession is set -func (r *ClientOptionsReader) CleanSession() bool { - s := r.options.CleanSession - return s -} - -func (r *ClientOptionsReader) Order() bool { - s := r.options.Order - return s -} - -func (r *ClientOptionsReader) WillEnabled() bool { - s := r.options.WillEnabled - return s -} - -func (r *ClientOptionsReader) WillTopic() string { - s := r.options.WillTopic - return s -} - -func (r *ClientOptionsReader) WillPayload() []byte { - s := r.options.WillPayload - return s -} - -func (r *ClientOptionsReader) WillQos() byte { - s := r.options.WillQos - return s -} - -func (r *ClientOptionsReader) WillRetained() bool { - s := r.options.WillRetained - return s -} - -func (r *ClientOptionsReader) ProtocolVersion() uint { - s := r.options.ProtocolVersion - return s -} - -func (r *ClientOptionsReader) TLSConfig() *tls.Config { - s := r.options.TLSConfig - return s -} - -func (r *ClientOptionsReader) KeepAlive() time.Duration { - s := time.Duration(r.options.KeepAlive * int64(time.Second)) - return s -} - -func (r *ClientOptionsReader) PingTimeout() time.Duration { - s := r.options.PingTimeout - return s -} - -func (r *ClientOptionsReader) ConnectTimeout() time.Duration { - s := r.options.ConnectTimeout - return s -} - -func (r *ClientOptionsReader) MaxReconnectInterval() time.Duration { - s := r.options.MaxReconnectInterval - return s -} - -func (r *ClientOptionsReader) AutoReconnect() bool { - s := r.options.AutoReconnect - return s -} - -func (r *ClientOptionsReader) WriteTimeout() time.Duration { - s := r.options.WriteTimeout - return s -} - -func (r *ClientOptionsReader) MessageChannelDepth() uint { - s := r.options.MessageChannelDepth - return s -} - -func (r *ClientOptionsReader) HTTPHeaders() http.Header { - h := r.options.HTTPHeaders - return h -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/connack.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/connack.go deleted file mode 100644 index 25cf30f63d6..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/connack.go +++ /dev/null @@ -1,55 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - 
"io" -) - -//ConnackPacket is an internal representation of the fields of the -//Connack MQTT packet -type ConnackPacket struct { - FixedHeader - SessionPresent bool - ReturnCode byte -} - -func (ca *ConnackPacket) String() string { - str := fmt.Sprintf("%s", ca.FixedHeader) - str += " " - str += fmt.Sprintf("sessionpresent: %t returncode: %d", ca.SessionPresent, ca.ReturnCode) - return str -} - -func (ca *ConnackPacket) Write(w io.Writer) error { - var body bytes.Buffer - var err error - - body.WriteByte(boolToByte(ca.SessionPresent)) - body.WriteByte(ca.ReturnCode) - ca.FixedHeader.RemainingLength = 2 - packet := ca.FixedHeader.pack() - packet.Write(body.Bytes()) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (ca *ConnackPacket) Unpack(b io.Reader) error { - flags, err := decodeByte(b) - if err != nil { - return err - } - ca.SessionPresent = 1&flags > 0 - ca.ReturnCode, err = decodeByte(b) - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (ca *ConnackPacket) Details() Details { - return Details{Qos: 0, MessageID: 0} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/connect.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/connect.go deleted file mode 100644 index cb03ebc0730..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/connect.go +++ /dev/null @@ -1,154 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - "io" -) - -//ConnectPacket is an internal representation of the fields of the -//Connect MQTT packet -type ConnectPacket struct { - FixedHeader - ProtocolName string - ProtocolVersion byte - CleanSession bool - WillFlag bool - WillQos byte - WillRetain bool - UsernameFlag bool - PasswordFlag bool - ReservedBit byte - Keepalive uint16 - - ClientIdentifier string - WillTopic string - WillMessage []byte - Username string - Password []byte -} - -func (c *ConnectPacket) String() string { - str := fmt.Sprintf("%s", c.FixedHeader) - str += " " - str += fmt.Sprintf("protocolversion: %d protocolname: %s cleansession: %t willflag: %t WillQos: %d WillRetain: %t Usernameflag: %t Passwordflag: %t keepalive: %d clientId: %s willtopic: %s willmessage: %s Username: %s Password: %s", c.ProtocolVersion, c.ProtocolName, c.CleanSession, c.WillFlag, c.WillQos, c.WillRetain, c.UsernameFlag, c.PasswordFlag, c.Keepalive, c.ClientIdentifier, c.WillTopic, c.WillMessage, c.Username, c.Password) - return str -} - -func (c *ConnectPacket) Write(w io.Writer) error { - var body bytes.Buffer - var err error - - body.Write(encodeString(c.ProtocolName)) - body.WriteByte(c.ProtocolVersion) - body.WriteByte(boolToByte(c.CleanSession)<<1 | boolToByte(c.WillFlag)<<2 | c.WillQos<<3 | boolToByte(c.WillRetain)<<5 | boolToByte(c.PasswordFlag)<<6 | boolToByte(c.UsernameFlag)<<7) - body.Write(encodeUint16(c.Keepalive)) - body.Write(encodeString(c.ClientIdentifier)) - if c.WillFlag { - body.Write(encodeString(c.WillTopic)) - body.Write(encodeBytes(c.WillMessage)) - } - if c.UsernameFlag { - body.Write(encodeString(c.Username)) - } - if c.PasswordFlag { - body.Write(encodeBytes(c.Password)) - } - c.FixedHeader.RemainingLength = body.Len() - packet := c.FixedHeader.pack() - packet.Write(body.Bytes()) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (c *ConnectPacket) Unpack(b io.Reader) error { - var err error - 
c.ProtocolName, err = decodeString(b) - if err != nil { - return err - } - c.ProtocolVersion, err = decodeByte(b) - if err != nil { - return err - } - options, err := decodeByte(b) - if err != nil { - return err - } - c.ReservedBit = 1 & options - c.CleanSession = 1&(options>>1) > 0 - c.WillFlag = 1&(options>>2) > 0 - c.WillQos = 3 & (options >> 3) - c.WillRetain = 1&(options>>5) > 0 - c.PasswordFlag = 1&(options>>6) > 0 - c.UsernameFlag = 1&(options>>7) > 0 - c.Keepalive, err = decodeUint16(b) - if err != nil { - return err - } - c.ClientIdentifier, err = decodeString(b) - if err != nil { - return err - } - if c.WillFlag { - c.WillTopic, err = decodeString(b) - if err != nil { - return err - } - c.WillMessage, err = decodeBytes(b) - if err != nil { - return err - } - } - if c.UsernameFlag { - c.Username, err = decodeString(b) - if err != nil { - return err - } - } - if c.PasswordFlag { - c.Password, err = decodeBytes(b) - if err != nil { - return err - } - } - - return nil -} - -//Validate performs validation of the fields of a Connect packet -func (c *ConnectPacket) Validate() byte { - if c.PasswordFlag && !c.UsernameFlag { - return ErrRefusedBadUsernameOrPassword - } - if c.ReservedBit != 0 { - //Bad reserved bit - return ErrProtocolViolation - } - if (c.ProtocolName == "MQIsdp" && c.ProtocolVersion != 3) || (c.ProtocolName == "MQTT" && c.ProtocolVersion != 4) { - //Mismatched or unsupported protocol version - return ErrRefusedBadProtocolVersion - } - if c.ProtocolName != "MQIsdp" && c.ProtocolName != "MQTT" { - //Bad protocol name - return ErrProtocolViolation - } - if len(c.ClientIdentifier) > 65535 || len(c.Username) > 65535 || len(c.Password) > 65535 { - //Bad size field - return ErrProtocolViolation - } - if len(c.ClientIdentifier) == 0 && !c.CleanSession { - //Bad client identifier - return ErrRefusedIDRejected - } - return Accepted -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (c *ConnectPacket) Details() Details { - return Details{Qos: 0, MessageID: 0} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/disconnect.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/disconnect.go deleted file mode 100644 index e5c1869207c..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/disconnect.go +++ /dev/null @@ -1,36 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//DisconnectPacket is an internal representation of the fields of the -//Disconnect MQTT packet -type DisconnectPacket struct { - FixedHeader -} - -func (d *DisconnectPacket) String() string { - str := fmt.Sprintf("%s", d.FixedHeader) - return str -} - -func (d *DisconnectPacket) Write(w io.Writer) error { - packet := d.FixedHeader.pack() - _, err := packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (d *DisconnectPacket) Unpack(b io.Reader) error { - return nil -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (d *DisconnectPacket) Details() Details { - return Details{Qos: 0, MessageID: 0} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/packets.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/packets.go deleted file mode 100644 index 42eeb46d39c..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/packets.go +++ /dev/null @@ -1,346 +0,0 @@ -package packets - -import ( - "bytes" - "encoding/binary" - "errors" - "fmt" - "io" -) - -//ControlPacket 
defines the interface for structs intended to hold -//decoded MQTT packets, either from being read or before being -//written -type ControlPacket interface { - Write(io.Writer) error - Unpack(io.Reader) error - String() string - Details() Details -} - -//PacketNames maps the constants for each of the MQTT packet types -//to a string representation of their name. -var PacketNames = map[uint8]string{ - 1: "CONNECT", - 2: "CONNACK", - 3: "PUBLISH", - 4: "PUBACK", - 5: "PUBREC", - 6: "PUBREL", - 7: "PUBCOMP", - 8: "SUBSCRIBE", - 9: "SUBACK", - 10: "UNSUBSCRIBE", - 11: "UNSUBACK", - 12: "PINGREQ", - 13: "PINGRESP", - 14: "DISCONNECT", -} - -//Below are the constants assigned to each of the MQTT packet types -const ( - Connect = 1 - Connack = 2 - Publish = 3 - Puback = 4 - Pubrec = 5 - Pubrel = 6 - Pubcomp = 7 - Subscribe = 8 - Suback = 9 - Unsubscribe = 10 - Unsuback = 11 - Pingreq = 12 - Pingresp = 13 - Disconnect = 14 -) - -//Below are the const definitions for error codes returned by -//Connect() -const ( - Accepted = 0x00 - ErrRefusedBadProtocolVersion = 0x01 - ErrRefusedIDRejected = 0x02 - ErrRefusedServerUnavailable = 0x03 - ErrRefusedBadUsernameOrPassword = 0x04 - ErrRefusedNotAuthorised = 0x05 - ErrNetworkError = 0xFE - ErrProtocolViolation = 0xFF -) - -//ConnackReturnCodes is a map of the error codes constants for Connect() -//to a string representation of the error -var ConnackReturnCodes = map[uint8]string{ - 0: "Connection Accepted", - 1: "Connection Refused: Bad Protocol Version", - 2: "Connection Refused: Client Identifier Rejected", - 3: "Connection Refused: Server Unavailable", - 4: "Connection Refused: Username or Password in unknown format", - 5: "Connection Refused: Not Authorised", - 254: "Connection Error", - 255: "Connection Refused: Protocol Violation", -} - -//ConnErrors is a map of the errors codes constants for Connect() -//to a Go error -var ConnErrors = map[byte]error{ - Accepted: nil, - ErrRefusedBadProtocolVersion: errors.New("Unnacceptable protocol version"), - ErrRefusedIDRejected: errors.New("Identifier rejected"), - ErrRefusedServerUnavailable: errors.New("Server Unavailable"), - ErrRefusedBadUsernameOrPassword: errors.New("Bad user name or password"), - ErrRefusedNotAuthorised: errors.New("Not Authorized"), - ErrNetworkError: errors.New("Network Error"), - ErrProtocolViolation: errors.New("Protocol Violation"), -} - -//ReadPacket takes an instance of an io.Reader (such as net.Conn) and attempts -//to read an MQTT packet from the stream. It returns a ControlPacket -//representing the decoded MQTT packet and an error. One of these returns will -//always be nil, a nil ControlPacket indicating an error occurred. -func ReadPacket(r io.Reader) (ControlPacket, error) { - var fh FixedHeader - b := make([]byte, 1) - - _, err := io.ReadFull(r, b) - if err != nil { - return nil, err - } - - err = fh.unpack(b[0], r) - if err != nil { - return nil, err - } - - cp, err := NewControlPacketWithHeader(fh) - if err != nil { - return nil, err - } - - packetBytes := make([]byte, fh.RemainingLength) - n, err := io.ReadFull(r, packetBytes) - if err != nil { - return nil, err - } - if n != fh.RemainingLength { - return nil, errors.New("Failed to read expected data") - } - - err = cp.Unpack(bytes.NewBuffer(packetBytes)) - return cp, err -} - -//NewControlPacket is used to create a new ControlPacket of the type specified -//by packetType, this is usually done by reference to the packet type constants -//defined in packets.go. 
The newly created ControlPacket is empty and a pointer -//is returned. -func NewControlPacket(packetType byte) ControlPacket { - switch packetType { - case Connect: - return &ConnectPacket{FixedHeader: FixedHeader{MessageType: Connect}} - case Connack: - return &ConnackPacket{FixedHeader: FixedHeader{MessageType: Connack}} - case Disconnect: - return &DisconnectPacket{FixedHeader: FixedHeader{MessageType: Disconnect}} - case Publish: - return &PublishPacket{FixedHeader: FixedHeader{MessageType: Publish}} - case Puback: - return &PubackPacket{FixedHeader: FixedHeader{MessageType: Puback}} - case Pubrec: - return &PubrecPacket{FixedHeader: FixedHeader{MessageType: Pubrec}} - case Pubrel: - return &PubrelPacket{FixedHeader: FixedHeader{MessageType: Pubrel, Qos: 1}} - case Pubcomp: - return &PubcompPacket{FixedHeader: FixedHeader{MessageType: Pubcomp}} - case Subscribe: - return &SubscribePacket{FixedHeader: FixedHeader{MessageType: Subscribe, Qos: 1}} - case Suback: - return &SubackPacket{FixedHeader: FixedHeader{MessageType: Suback}} - case Unsubscribe: - return &UnsubscribePacket{FixedHeader: FixedHeader{MessageType: Unsubscribe, Qos: 1}} - case Unsuback: - return &UnsubackPacket{FixedHeader: FixedHeader{MessageType: Unsuback}} - case Pingreq: - return &PingreqPacket{FixedHeader: FixedHeader{MessageType: Pingreq}} - case Pingresp: - return &PingrespPacket{FixedHeader: FixedHeader{MessageType: Pingresp}} - } - return nil -} - -//NewControlPacketWithHeader is used to create a new ControlPacket of the type -//specified within the FixedHeader that is passed to the function. -//The newly created ControlPacket is empty and a pointer is returned. -func NewControlPacketWithHeader(fh FixedHeader) (ControlPacket, error) { - switch fh.MessageType { - case Connect: - return &ConnectPacket{FixedHeader: fh}, nil - case Connack: - return &ConnackPacket{FixedHeader: fh}, nil - case Disconnect: - return &DisconnectPacket{FixedHeader: fh}, nil - case Publish: - return &PublishPacket{FixedHeader: fh}, nil - case Puback: - return &PubackPacket{FixedHeader: fh}, nil - case Pubrec: - return &PubrecPacket{FixedHeader: fh}, nil - case Pubrel: - return &PubrelPacket{FixedHeader: fh}, nil - case Pubcomp: - return &PubcompPacket{FixedHeader: fh}, nil - case Subscribe: - return &SubscribePacket{FixedHeader: fh}, nil - case Suback: - return &SubackPacket{FixedHeader: fh}, nil - case Unsubscribe: - return &UnsubscribePacket{FixedHeader: fh}, nil - case Unsuback: - return &UnsubackPacket{FixedHeader: fh}, nil - case Pingreq: - return &PingreqPacket{FixedHeader: fh}, nil - case Pingresp: - return &PingrespPacket{FixedHeader: fh}, nil - } - return nil, fmt.Errorf("unsupported packet type 0x%x", fh.MessageType) -} - -//Details struct returned by the Details() function called on -//ControlPackets to present details of the Qos and MessageID -//of the ControlPacket -type Details struct { - Qos byte - MessageID uint16 -} - -//FixedHeader is a struct to hold the decoded information from -//the fixed header of an MQTT ControlPacket -type FixedHeader struct { - MessageType byte - Dup bool - Qos byte - Retain bool - RemainingLength int -} - -func (fh FixedHeader) String() string { - return fmt.Sprintf("%s: dup: %t qos: %d retain: %t rLength: %d", PacketNames[fh.MessageType], fh.Dup, fh.Qos, fh.Retain, fh.RemainingLength) -} - -func boolToByte(b bool) byte { - switch b { - case true: - return 1 - default: - return 0 - } -} - -func (fh *FixedHeader) pack() bytes.Buffer { - var header bytes.Buffer - header.WriteByte(fh.MessageType<<4 
| boolToByte(fh.Dup)<<3 | fh.Qos<<1 | boolToByte(fh.Retain)) - header.Write(encodeLength(fh.RemainingLength)) - return header -} - -func (fh *FixedHeader) unpack(typeAndFlags byte, r io.Reader) error { - fh.MessageType = typeAndFlags >> 4 - fh.Dup = (typeAndFlags>>3)&0x01 > 0 - fh.Qos = (typeAndFlags >> 1) & 0x03 - fh.Retain = typeAndFlags&0x01 > 0 - - var err error - fh.RemainingLength, err = decodeLength(r) - return err -} - -func decodeByte(b io.Reader) (byte, error) { - num := make([]byte, 1) - _, err := b.Read(num) - if err != nil { - return 0, err - } - - return num[0], nil -} - -func decodeUint16(b io.Reader) (uint16, error) { - num := make([]byte, 2) - _, err := b.Read(num) - if err != nil { - return 0, err - } - return binary.BigEndian.Uint16(num), nil -} - -func encodeUint16(num uint16) []byte { - bytes := make([]byte, 2) - binary.BigEndian.PutUint16(bytes, num) - return bytes -} - -func encodeString(field string) []byte { - return encodeBytes([]byte(field)) -} - -func decodeString(b io.Reader) (string, error) { - buf, err := decodeBytes(b) - return string(buf), err -} - -func decodeBytes(b io.Reader) ([]byte, error) { - fieldLength, err := decodeUint16(b) - if err != nil { - return nil, err - } - - field := make([]byte, fieldLength) - _, err = b.Read(field) - if err != nil { - return nil, err - } - - return field, nil -} - -func encodeBytes(field []byte) []byte { - fieldLength := make([]byte, 2) - binary.BigEndian.PutUint16(fieldLength, uint16(len(field))) - return append(fieldLength, field...) -} - -func encodeLength(length int) []byte { - var encLength []byte - for { - digit := byte(length % 128) - length /= 128 - if length > 0 { - digit |= 0x80 - } - encLength = append(encLength, digit) - if length == 0 { - break - } - } - return encLength -} - -func decodeLength(r io.Reader) (int, error) { - var rLength uint32 - var multiplier uint32 - b := make([]byte, 1) - for multiplier < 27 { //fix: Infinite '(digit & 128) == 1' will cause the dead loop - _, err := io.ReadFull(r, b) - if err != nil { - return 0, err - } - - digit := b[0] - rLength |= uint32(digit&127) << multiplier - if (digit & 128) == 0 { - break - } - multiplier += 7 - } - return int(rLength), nil -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingreq.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingreq.go deleted file mode 100644 index 5c3e88f9408..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingreq.go +++ /dev/null @@ -1,36 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PingreqPacket is an internal representation of the fields of the -//Pingreq MQTT packet -type PingreqPacket struct { - FixedHeader -} - -func (pr *PingreqPacket) String() string { - str := fmt.Sprintf("%s", pr.FixedHeader) - return str -} - -func (pr *PingreqPacket) Write(w io.Writer) error { - packet := pr.FixedHeader.pack() - _, err := packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pr *PingreqPacket) Unpack(b io.Reader) error { - return nil -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (pr *PingreqPacket) Details() Details { - return Details{Qos: 0, MessageID: 0} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingresp.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingresp.go deleted file mode 100644 index 39ebc001e66..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pingresp.go +++ 
/dev/null @@ -1,36 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PingrespPacket is an internal representation of the fields of the -//Pingresp MQTT packet -type PingrespPacket struct { - FixedHeader -} - -func (pr *PingrespPacket) String() string { - str := fmt.Sprintf("%s", pr.FixedHeader) - return str -} - -func (pr *PingrespPacket) Write(w io.Writer) error { - packet := pr.FixedHeader.pack() - _, err := packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pr *PingrespPacket) Unpack(b io.Reader) error { - return nil -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (pr *PingrespPacket) Details() Details { - return Details{Qos: 0, MessageID: 0} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/puback.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/puback.go deleted file mode 100644 index 7c0cd7efdd1..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/puback.go +++ /dev/null @@ -1,45 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PubackPacket is an internal representation of the fields of the -//Puback MQTT packet -type PubackPacket struct { - FixedHeader - MessageID uint16 -} - -func (pa *PubackPacket) String() string { - str := fmt.Sprintf("%s", pa.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", pa.MessageID) - return str -} - -func (pa *PubackPacket) Write(w io.Writer) error { - var err error - pa.FixedHeader.RemainingLength = 2 - packet := pa.FixedHeader.pack() - packet.Write(encodeUint16(pa.MessageID)) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pa *PubackPacket) Unpack(b io.Reader) error { - var err error - pa.MessageID, err = decodeUint16(b) - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (pa *PubackPacket) Details() Details { - return Details{Qos: pa.Qos, MessageID: pa.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubcomp.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubcomp.go deleted file mode 100644 index 4f6f6e216e1..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubcomp.go +++ /dev/null @@ -1,45 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PubcompPacket is an internal representation of the fields of the -//Pubcomp MQTT packet -type PubcompPacket struct { - FixedHeader - MessageID uint16 -} - -func (pc *PubcompPacket) String() string { - str := fmt.Sprintf("%s", pc.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", pc.MessageID) - return str -} - -func (pc *PubcompPacket) Write(w io.Writer) error { - var err error - pc.FixedHeader.RemainingLength = 2 - packet := pc.FixedHeader.pack() - packet.Write(encodeUint16(pc.MessageID)) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pc *PubcompPacket) Unpack(b io.Reader) error { - var err error - pc.MessageID, err = decodeUint16(b) - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (pc *PubcompPacket) Details() Details { - return Details{Qos: pc.Qos, MessageID: pc.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/publish.go 
b/vendor/github.com/eclipse/paho.mqtt.golang/packets/publish.go deleted file mode 100644 index adc9adb9c0d..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/publish.go +++ /dev/null @@ -1,88 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - "io" -) - -//PublishPacket is an internal representation of the fields of the -//Publish MQTT packet -type PublishPacket struct { - FixedHeader - TopicName string - MessageID uint16 - Payload []byte -} - -func (p *PublishPacket) String() string { - str := fmt.Sprintf("%s", p.FixedHeader) - str += " " - str += fmt.Sprintf("topicName: %s MessageID: %d", p.TopicName, p.MessageID) - str += " " - str += fmt.Sprintf("payload: %s", string(p.Payload)) - return str -} - -func (p *PublishPacket) Write(w io.Writer) error { - var body bytes.Buffer - var err error - - body.Write(encodeString(p.TopicName)) - if p.Qos > 0 { - body.Write(encodeUint16(p.MessageID)) - } - p.FixedHeader.RemainingLength = body.Len() + len(p.Payload) - packet := p.FixedHeader.pack() - packet.Write(body.Bytes()) - packet.Write(p.Payload) - _, err = w.Write(packet.Bytes()) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (p *PublishPacket) Unpack(b io.Reader) error { - var payloadLength = p.FixedHeader.RemainingLength - var err error - p.TopicName, err = decodeString(b) - if err != nil { - return err - } - - if p.Qos > 0 { - p.MessageID, err = decodeUint16(b) - if err != nil { - return err - } - payloadLength -= len(p.TopicName) + 4 - } else { - payloadLength -= len(p.TopicName) + 2 - } - if payloadLength < 0 { - return fmt.Errorf("Error unpacking publish, payload length < 0") - } - p.Payload = make([]byte, payloadLength) - _, err = b.Read(p.Payload) - - return err -} - -//Copy creates a new PublishPacket with the same topic and payload -//but an empty fixed header, useful for when you want to deliver -//a message with different properties such as Qos but the same -//content -func (p *PublishPacket) Copy() *PublishPacket { - newP := NewControlPacket(Publish).(*PublishPacket) - newP.TopicName = p.TopicName - newP.Payload = p.Payload - - return newP -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (p *PublishPacket) Details() Details { - return Details{Qos: p.Qos, MessageID: p.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrec.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrec.go deleted file mode 100644 index 483372b0722..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrec.go +++ /dev/null @@ -1,45 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PubrecPacket is an internal representation of the fields of the -//Pubrec MQTT packet -type PubrecPacket struct { - FixedHeader - MessageID uint16 -} - -func (pr *PubrecPacket) String() string { - str := fmt.Sprintf("%s", pr.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", pr.MessageID) - return str -} - -func (pr *PubrecPacket) Write(w io.Writer) error { - var err error - pr.FixedHeader.RemainingLength = 2 - packet := pr.FixedHeader.pack() - packet.Write(encodeUint16(pr.MessageID)) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pr *PubrecPacket) Unpack(b io.Reader) error { - var err error - pr.MessageID, err = decodeUint16(b) - - return err -} - -//Details returns a Details struct containing the Qos and 
-//MessageID of this ControlPacket -func (pr *PubrecPacket) Details() Details { - return Details{Qos: pr.Qos, MessageID: pr.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrel.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrel.go deleted file mode 100644 index 8590fd976ce..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/pubrel.go +++ /dev/null @@ -1,45 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//PubrelPacket is an internal representation of the fields of the -//Pubrel MQTT packet -type PubrelPacket struct { - FixedHeader - MessageID uint16 -} - -func (pr *PubrelPacket) String() string { - str := fmt.Sprintf("%s", pr.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", pr.MessageID) - return str -} - -func (pr *PubrelPacket) Write(w io.Writer) error { - var err error - pr.FixedHeader.RemainingLength = 2 - packet := pr.FixedHeader.pack() - packet.Write(encodeUint16(pr.MessageID)) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (pr *PubrelPacket) Unpack(b io.Reader) error { - var err error - pr.MessageID, err = decodeUint16(b) - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (pr *PubrelPacket) Details() Details { - return Details{Qos: pr.Qos, MessageID: pr.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/suback.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/suback.go deleted file mode 100644 index fc0572475ad..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/suback.go +++ /dev/null @@ -1,60 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - "io" -) - -//SubackPacket is an internal representation of the fields of the -//Suback MQTT packet -type SubackPacket struct { - FixedHeader - MessageID uint16 - ReturnCodes []byte -} - -func (sa *SubackPacket) String() string { - str := fmt.Sprintf("%s", sa.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", sa.MessageID) - return str -} - -func (sa *SubackPacket) Write(w io.Writer) error { - var body bytes.Buffer - var err error - body.Write(encodeUint16(sa.MessageID)) - body.Write(sa.ReturnCodes) - sa.FixedHeader.RemainingLength = body.Len() - packet := sa.FixedHeader.pack() - packet.Write(body.Bytes()) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (sa *SubackPacket) Unpack(b io.Reader) error { - var qosBuffer bytes.Buffer - var err error - sa.MessageID, err = decodeUint16(b) - if err != nil { - return err - } - - _, err = qosBuffer.ReadFrom(b) - if err != nil { - return err - } - sa.ReturnCodes = qosBuffer.Bytes() - - return nil -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (sa *SubackPacket) Details() Details { - return Details{Qos: 0, MessageID: sa.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/subscribe.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/subscribe.go deleted file mode 100644 index 0787ce07c07..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/subscribe.go +++ /dev/null @@ -1,72 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - "io" -) - -//SubscribePacket is an internal representation of the fields of the -//Subscribe MQTT packet -type SubscribePacket struct { - FixedHeader - MessageID 
uint16 - Topics []string - Qoss []byte -} - -func (s *SubscribePacket) String() string { - str := fmt.Sprintf("%s", s.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d topics: %s", s.MessageID, s.Topics) - return str -} - -func (s *SubscribePacket) Write(w io.Writer) error { - var body bytes.Buffer - var err error - - body.Write(encodeUint16(s.MessageID)) - for i, topic := range s.Topics { - body.Write(encodeString(topic)) - body.WriteByte(s.Qoss[i]) - } - s.FixedHeader.RemainingLength = body.Len() - packet := s.FixedHeader.pack() - packet.Write(body.Bytes()) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (s *SubscribePacket) Unpack(b io.Reader) error { - var err error - s.MessageID, err = decodeUint16(b) - if err != nil { - return err - } - payloadLength := s.FixedHeader.RemainingLength - 2 - for payloadLength > 0 { - topic, err := decodeString(b) - if err != nil { - return err - } - s.Topics = append(s.Topics, topic) - qos, err := decodeByte(b) - if err != nil { - return err - } - s.Qoss = append(s.Qoss, qos) - payloadLength -= 2 + len(topic) + 1 //2 bytes of string length, plus string, plus 1 byte for Qos - } - - return nil -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (s *SubscribePacket) Details() Details { - return Details{Qos: 1, MessageID: s.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsuback.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsuback.go deleted file mode 100644 index 4b40c273af0..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsuback.go +++ /dev/null @@ -1,45 +0,0 @@ -package packets - -import ( - "fmt" - "io" -) - -//UnsubackPacket is an internal representation of the fields of the -//Unsuback MQTT packet -type UnsubackPacket struct { - FixedHeader - MessageID uint16 -} - -func (ua *UnsubackPacket) String() string { - str := fmt.Sprintf("%s", ua.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", ua.MessageID) - return str -} - -func (ua *UnsubackPacket) Write(w io.Writer) error { - var err error - ua.FixedHeader.RemainingLength = 2 - packet := ua.FixedHeader.pack() - packet.Write(encodeUint16(ua.MessageID)) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (ua *UnsubackPacket) Unpack(b io.Reader) error { - var err error - ua.MessageID, err = decodeUint16(b) - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (ua *UnsubackPacket) Details() Details { - return Details{Qos: 0, MessageID: ua.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsubscribe.go b/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsubscribe.go deleted file mode 100644 index 2012c310f83..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/packets/unsubscribe.go +++ /dev/null @@ -1,59 +0,0 @@ -package packets - -import ( - "bytes" - "fmt" - "io" -) - -//UnsubscribePacket is an internal representation of the fields of the -//Unsubscribe MQTT packet -type UnsubscribePacket struct { - FixedHeader - MessageID uint16 - Topics []string -} - -func (u *UnsubscribePacket) String() string { - str := fmt.Sprintf("%s", u.FixedHeader) - str += " " - str += fmt.Sprintf("MessageID: %d", u.MessageID) - return str -} - -func (u *UnsubscribePacket) Write(w io.Writer) error { 
- var body bytes.Buffer - var err error - body.Write(encodeUint16(u.MessageID)) - for _, topic := range u.Topics { - body.Write(encodeString(topic)) - } - u.FixedHeader.RemainingLength = body.Len() - packet := u.FixedHeader.pack() - packet.Write(body.Bytes()) - _, err = packet.WriteTo(w) - - return err -} - -//Unpack decodes the details of a ControlPacket after the fixed -//header has been read -func (u *UnsubscribePacket) Unpack(b io.Reader) error { - var err error - u.MessageID, err = decodeUint16(b) - if err != nil { - return err - } - - for topic, err := decodeString(b); err == nil && topic != ""; topic, err = decodeString(b) { - u.Topics = append(u.Topics, topic) - } - - return err -} - -//Details returns a Details struct containing the Qos and -//MessageID of this ControlPacket -func (u *UnsubscribePacket) Details() Details { - return Details{Qos: 1, MessageID: u.MessageID} -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/ping.go b/vendor/github.com/eclipse/paho.mqtt.golang/ping.go deleted file mode 100644 index dcbcb1dd226..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/ping.go +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "errors" - "sync/atomic" - "time" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -func keepalive(c *client) { - defer c.workers.Done() - DEBUG.Println(PNG, "keepalive starting") - var checkInterval int64 - var pingSent time.Time - - if c.options.KeepAlive > 10 { - checkInterval = 5 - } else { - checkInterval = c.options.KeepAlive / 2 - } - - intervalTicker := time.NewTicker(time.Duration(checkInterval * int64(time.Second))) - defer intervalTicker.Stop() - - for { - select { - case <-c.stop: - DEBUG.Println(PNG, "keepalive stopped") - return - case <-intervalTicker.C: - lastSent := c.lastSent.Load().(time.Time) - lastReceived := c.lastReceived.Load().(time.Time) - - DEBUG.Println(PNG, "ping check", time.Since(lastSent).Seconds()) - if time.Since(lastSent) >= time.Duration(c.options.KeepAlive*int64(time.Second)) || time.Since(lastReceived) >= time.Duration(c.options.KeepAlive*int64(time.Second)) { - if atomic.LoadInt32(&c.pingOutstanding) == 0 { - DEBUG.Println(PNG, "keepalive sending ping") - ping := packets.NewControlPacket(packets.Pingreq).(*packets.PingreqPacket) - //We don't want to wait behind large messages being sent, the Write call - //will block until it it able to send the packet. - atomic.StoreInt32(&c.pingOutstanding, 1) - ping.Write(c.conn) - c.lastSent.Store(time.Now()) - pingSent = time.Now() - } - } - if atomic.LoadInt32(&c.pingOutstanding) > 0 && time.Now().Sub(pingSent) >= c.options.PingTimeout { - CRITICAL.Println(PNG, "pingresp not received, disconnecting") - c.errors <- errors.New("pingresp not received, disconnecting") - return - } - } - } -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/router.go b/vendor/github.com/eclipse/paho.mqtt.golang/router.go deleted file mode 100644 index 7b4e8f8082f..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/router.go +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "container/list" - "strings" - "sync" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -// route is a type which associates MQTT Topic strings with a -// callback to be executed upon the arrival of a message associated -// with a subscription to that topic. -type route struct { - topic string - callback MessageHandler -} - -// match takes a slice of strings which represent the route being tested having been split on '/' -// separators, and a slice of strings representing the topic string in the published message, similarly -// split. -// The function determines if the topic string matches the route according to the MQTT topic rules -// and returns a boolean of the outcome -func match(route []string, topic []string) bool { - if len(route) == 0 { - if len(topic) == 0 { - return true - } - return false - } - - if len(topic) == 0 { - if route[0] == "#" { - return true - } - return false - } - - if route[0] == "#" { - return true - } - - if (route[0] == "+") || (route[0] == topic[0]) { - return match(route[1:], topic[1:]) - } - return false -} - -func routeIncludesTopic(route, topic string) bool { - return match(routeSplit(route), strings.Split(topic, "/")) -} - -// removes $share and sharename when splitting the route to allow -// shared subscription routes to correctly match the topic -func routeSplit(route string) []string { - var result []string - if strings.HasPrefix(route, "$share") { - result = strings.Split(route, "/")[2:] - } else { - result = strings.Split(route, "/") - } - return result -} - -// match takes the topic string of the published message and does a basic compare to the -// string of the current Route, if they match it returns true -func (r *route) match(topic string) bool { - return r.topic == topic || routeIncludesTopic(r.topic, topic) -} - -type router struct { - sync.RWMutex - routes *list.List - defaultHandler MessageHandler - messages chan *packets.PublishPacket - stop chan bool -} - -// newRouter returns a new instance of a Router and channel which can be used to tell the Router -// to stop -func newRouter() (*router, chan bool) { - router := &router{routes: list.New(), messages: make(chan *packets.PublishPacket), stop: make(chan bool)} - stop := router.stop - return router, stop -} - -// addRoute takes a topic string and MessageHandler callback. It looks in the current list of -// routes to see if there is already a matching Route. If there is it replaces the current -// callback with the new one. If not it add a new entry to the list of Routes. -func (r *router) addRoute(topic string, callback MessageHandler) { - r.Lock() - defer r.Unlock() - for e := r.routes.Front(); e != nil; e = e.Next() { - if e.Value.(*route).match(topic) { - r := e.Value.(*route) - r.callback = callback - return - } - } - r.routes.PushBack(&route{topic: topic, callback: callback}) -} - -// deleteRoute takes a route string, looks for a matching Route in the list of Routes. If -// found it removes the Route from the list. 
-func (r *router) deleteRoute(topic string) { - r.Lock() - defer r.Unlock() - for e := r.routes.Front(); e != nil; e = e.Next() { - if e.Value.(*route).match(topic) { - r.routes.Remove(e) - return - } - } -} - -// setDefaultHandler assigns a default callback that will be called if no matching Route -// is found for an incoming Publish. -func (r *router) setDefaultHandler(handler MessageHandler) { - r.Lock() - defer r.Unlock() - r.defaultHandler = handler -} - -// matchAndDispatch takes a channel of Message pointers as input and starts a go routine that -// takes messages off the channel, matches them against the internal route list and calls the -// associated callback (or the defaultHandler, if one exists and no other route matched). If -// anything is sent down the stop channel the function will end. -func (r *router) matchAndDispatch(messages <-chan *packets.PublishPacket, order bool, client *client) { - go func() { - for { - select { - case message := <-messages: - sent := false - r.RLock() - m := messageFromPublish(message, client.ackFunc(message)) - handlers := []MessageHandler{} - for e := r.routes.Front(); e != nil; e = e.Next() { - if e.Value.(*route).match(message.TopicName) { - if order { - handlers = append(handlers, e.Value.(*route).callback) - } else { - hd := e.Value.(*route).callback - go func() { - hd(client, m) - m.Ack() - }() - } - sent = true - } - } - if !sent && r.defaultHandler != nil { - if order { - handlers = append(handlers, r.defaultHandler) - } else { - go func() { - r.defaultHandler(client, m) - m.Ack() - }() - } - } - r.RUnlock() - for _, handler := range handlers { - func() { - handler(client, m) - m.Ack() - }() - } - case <-r.stop: - return - } - } - }() -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/store.go b/vendor/github.com/eclipse/paho.mqtt.golang/store.go deleted file mode 100644 index 24a76b7df3c..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/store.go +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "fmt" - "strconv" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -const ( - inboundPrefix = "i." - outboundPrefix = "o." -) - -// Store is an interface which can be used to provide implementations -// for message persistence. -// Because we may have to store distinct messages with the same -// message ID, we need a unique key for each message. This is -// possible by prepending "i." or "o." 
to each message id -type Store interface { - Open() - Put(key string, message packets.ControlPacket) - Get(key string) packets.ControlPacket - All() []string - Del(key string) - Close() - Reset() -} - -// A key MUST have the form "X.[messageid]" -// where X is 'i' or 'o' -func mIDFromKey(key string) uint16 { - s := key[2:] - i, err := strconv.Atoi(s) - chkerr(err) - return uint16(i) -} - -// Return true if key prefix is outbound -func isKeyOutbound(key string) bool { - return key[:2] == outboundPrefix -} - -// Return true if key prefix is inbound -func isKeyInbound(key string) bool { - return key[:2] == inboundPrefix -} - -// Return a string of the form "i.[id]" -func inboundKeyFromMID(id uint16) string { - return fmt.Sprintf("%s%d", inboundPrefix, id) -} - -// Return a string of the form "o.[id]" -func outboundKeyFromMID(id uint16) string { - return fmt.Sprintf("%s%d", outboundPrefix, id) -} - -// govern which outgoing messages are persisted -func persistOutbound(s Store, m packets.ControlPacket) { - switch m.Details().Qos { - case 0: - switch m.(type) { - case *packets.PubackPacket, *packets.PubcompPacket: - // Sending puback. delete matching publish - // from ibound - s.Del(inboundKeyFromMID(m.Details().MessageID)) - } - case 1: - switch m.(type) { - case *packets.PublishPacket, *packets.PubrelPacket, *packets.SubscribePacket, *packets.UnsubscribePacket: - // Sending publish. store in obound - // until puback received - s.Put(outboundKeyFromMID(m.Details().MessageID), m) - default: - ERROR.Println(STR, "Asked to persist an invalid message type") - } - case 2: - switch m.(type) { - case *packets.PublishPacket: - // Sending publish. store in obound - // until pubrel received - s.Put(outboundKeyFromMID(m.Details().MessageID), m) - default: - ERROR.Println(STR, "Asked to persist an invalid message type") - } - } -} - -// govern which incoming messages are persisted -func persistInbound(s Store, m packets.ControlPacket) { - switch m.Details().Qos { - case 0: - switch m.(type) { - case *packets.PubackPacket, *packets.SubackPacket, *packets.UnsubackPacket, *packets.PubcompPacket: - // Received a puback. delete matching publish - // from obound - s.Del(outboundKeyFromMID(m.Details().MessageID)) - case *packets.PublishPacket, *packets.PubrecPacket, *packets.PingrespPacket, *packets.ConnackPacket: - default: - ERROR.Println(STR, "Asked to persist an invalid messages type") - } - case 1: - switch m.(type) { - case *packets.PublishPacket, *packets.PubrelPacket: - // Received a publish. store it in ibound - // until puback sent - s.Put(inboundKeyFromMID(m.Details().MessageID), m) - default: - ERROR.Println(STR, "Asked to persist an invalid messages type") - } - case 2: - switch m.(type) { - case *packets.PublishPacket: - // Received a publish. store it in ibound - // until pubrel received - s.Put(inboundKeyFromMID(m.Details().MessageID), m) - default: - ERROR.Println(STR, "Asked to persist an invalid messages type") - } - } -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/token.go b/vendor/github.com/eclipse/paho.mqtt.golang/token.go deleted file mode 100644 index 0818553332f..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/token.go +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2014 IBM Corp. - * - * All rights reserved. 
This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Allan Stockdill-Mander - */ - -package mqtt - -import ( - "sync" - "time" - - "github.com/eclipse/paho.mqtt.golang/packets" -) - -// PacketAndToken is a struct that contains both a ControlPacket and a -// Token. This struct is passed via channels between the client interface -// code and the underlying code responsible for sending and receiving -// MQTT messages. -type PacketAndToken struct { - p packets.ControlPacket - t tokenCompletor -} - -// Token defines the interface for the tokens used to indicate when -// actions have completed. -type Token interface { - Wait() bool - WaitTimeout(time.Duration) bool - Error() error -} - -type TokenErrorSetter interface { - setError(error) -} - -type tokenCompletor interface { - Token - TokenErrorSetter - flowComplete() -} - -type baseToken struct { - m sync.RWMutex - complete chan struct{} - err error -} - -// Wait will wait indefinitely for the Token to complete, ie the Publish -// to be sent and confirmed receipt from the broker -func (b *baseToken) Wait() bool { - <-b.complete - return true -} - -// WaitTimeout takes a time.Duration to wait for the flow associated with the -// Token to complete, returns true if it returned before the timeout or -// returns false if the timeout occurred. In the case of a timeout the Token -// does not have an error set in case the caller wishes to wait again -func (b *baseToken) WaitTimeout(d time.Duration) bool { - b.m.Lock() - defer b.m.Unlock() - - timer := time.NewTimer(d) - select { - case <-b.complete: - if !timer.Stop() { - <-timer.C - } - return true - case <-timer.C: - } - - return false -} - -func (b *baseToken) flowComplete() { - select { - case <-b.complete: - default: - close(b.complete) - } -} - -func (b *baseToken) Error() error { - b.m.RLock() - defer b.m.RUnlock() - return b.err -} - -func (b *baseToken) setError(e error) { - b.m.Lock() - b.err = e - b.flowComplete() - b.m.Unlock() -} - -func newToken(tType byte) tokenCompletor { - switch tType { - case packets.Connect: - return &ConnectToken{baseToken: baseToken{complete: make(chan struct{})}} - case packets.Subscribe: - return &SubscribeToken{baseToken: baseToken{complete: make(chan struct{})}, subResult: make(map[string]byte)} - case packets.Publish: - return &PublishToken{baseToken: baseToken{complete: make(chan struct{})}} - case packets.Unsubscribe: - return &UnsubscribeToken{baseToken: baseToken{complete: make(chan struct{})}} - case packets.Disconnect: - return &DisconnectToken{baseToken: baseToken{complete: make(chan struct{})}} - } - return nil -} - -// ConnectToken is an extension of Token containing the extra fields -// required to provide information about calls to Connect() -type ConnectToken struct { - baseToken - returnCode byte - sessionPresent bool -} - -// ReturnCode returns the acknowlegement code in the connack sent -// in response to a Connect() -func (c *ConnectToken) ReturnCode() byte { - c.m.RLock() - defer c.m.RUnlock() - return c.returnCode -} - -// SessionPresent returns a bool representing the value of the -// session present field in the connack sent in response to a Connect() -func (c *ConnectToken) SessionPresent() bool { - c.m.RLock() - defer c.m.RUnlock() - return c.sessionPresent -} - -// PublishToken is an extension of Token containing the extra fields -// required 
to provide information about calls to Publish() -type PublishToken struct { - baseToken - messageID uint16 -} - -// MessageID returns the MQTT message ID that was assigned to the -// Publish packet when it was sent to the broker -func (p *PublishToken) MessageID() uint16 { - return p.messageID -} - -// SubscribeToken is an extension of Token containing the extra fields -// required to provide information about calls to Subscribe() -type SubscribeToken struct { - baseToken - subs []string - subResult map[string]byte -} - -// Result returns a map of topics that were subscribed to along with -// the matching return code from the broker. This is either the Qos -// value of the subscription or an error code. -func (s *SubscribeToken) Result() map[string]byte { - s.m.RLock() - defer s.m.RUnlock() - return s.subResult -} - -// UnsubscribeToken is an extension of Token containing the extra fields -// required to provide information about calls to Unsubscribe() -type UnsubscribeToken struct { - baseToken -} - -// DisconnectToken is an extension of Token containing the extra fields -// required to provide information about calls to Disconnect() -type DisconnectToken struct { - baseToken -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/topic.go b/vendor/github.com/eclipse/paho.mqtt.golang/topic.go deleted file mode 100644 index 6fa3ad2ac5f..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/topic.go +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2014 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -import ( - "errors" - "strings" -) - -//ErrInvalidQos is the error returned when an packet is to be sent -//with an invalid Qos value -var ErrInvalidQos = errors.New("Invalid QoS") - -//ErrInvalidTopicEmptyString is the error returned when a topic string -//is passed in that is 0 length -var ErrInvalidTopicEmptyString = errors.New("Invalid Topic; empty string") - -//ErrInvalidTopicMultilevel is the error returned when a topic string -//is passed in that has the multi level wildcard in any position but -//the last -var ErrInvalidTopicMultilevel = errors.New("Invalid Topic; multi-level wildcard must be last level") - -// Topic Names and Topic Filters -// The MQTT v3.1.1 spec clarifies a number of ambiguities with regard -// to the validity of Topic strings. -// - A Topic must be between 1 and 65535 bytes. -// - A Topic is case sensitive. -// - A Topic may contain whitespace. -// - A Topic containing a leading forward slash is different than a Topic without. -// - A Topic may be "/" (two levels, both empty string). -// - A Topic must be UTF-8 encoded. -// - A Topic may contain any number of levels. -// - A Topic may contain an empty level (two forward slashes in a row). -// - A TopicName may not contain a wildcard. -// - A TopicFilter may only have a # (multi-level) wildcard as the last level. -// - A TopicFilter may contain any number of + (single-level) wildcards. -// - A TopicFilter with a # will match the absense of a level -// Example: a subscription to "foo/#" will match messages published to "foo". 
- -func validateSubscribeMap(subs map[string]byte) ([]string, []byte, error) { - var topics []string - var qoss []byte - for topic, qos := range subs { - if err := validateTopicAndQos(topic, qos); err != nil { - return nil, nil, err - } - topics = append(topics, topic) - qoss = append(qoss, qos) - } - - return topics, qoss, nil -} - -func validateTopicAndQos(topic string, qos byte) error { - if len(topic) == 0 { - return ErrInvalidTopicEmptyString - } - - levels := strings.Split(topic, "/") - for i, level := range levels { - if level == "#" && i != len(levels)-1 { - return ErrInvalidTopicMultilevel - } - } - - if qos < 0 || qos > 2 { - return ErrInvalidQos - } - return nil -} diff --git a/vendor/github.com/eclipse/paho.mqtt.golang/trace.go b/vendor/github.com/eclipse/paho.mqtt.golang/trace.go deleted file mode 100644 index 195c8173dcf..00000000000 --- a/vendor/github.com/eclipse/paho.mqtt.golang/trace.go +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2013 IBM Corp. - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v1.0 - * which accompanies this distribution, and is available at - * http://www.eclipse.org/legal/epl-v10.html - * - * Contributors: - * Seth Hoenig - * Allan Stockdill-Mander - * Mike Robertson - */ - -package mqtt - -type ( - // Logger interface allows implementations to provide to this package any - // object that implements the methods defined in it. - Logger interface { - Println(v ...interface{}) - Printf(format string, v ...interface{}) - } - - // NOOPLogger implements the logger that does not perform any operation - // by default. This allows us to efficiently discard the unwanted messages. - NOOPLogger struct{} -) - -func (NOOPLogger) Println(v ...interface{}) {} -func (NOOPLogger) Printf(format string, v ...interface{}) {} - -// Internal levels of library output that are initialised to not print -// anything but can be overridden by programmer -var ( - ERROR Logger = NOOPLogger{} - CRITICAL Logger = NOOPLogger{} - WARN Logger = NOOPLogger{} - DEBUG Logger = NOOPLogger{} -) diff --git a/vendor/github.com/evalphobia/logrus_sentry/.travis.yml b/vendor/github.com/evalphobia/logrus_sentry/.travis.yml deleted file mode 100644 index 33edbc4c30f..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -sudo: false -language: go -go: - - 1.10.x - - 1.x - - tip -matrix: - allow_failures: - - go: tip -before_install: - - go get github.com/axw/gocov/gocov - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover - - test -z "$(gofmt -s -l . | tee /dev/stderr)" - - go tool vet -all -structtags -shadow . 
-script: - - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/evalphobia/logrus_sentry/LICENSE b/vendor/github.com/evalphobia/logrus_sentry/LICENSE deleted file mode 100644 index a2301f534a2..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 logrus_sentry Authors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/evalphobia/logrus_sentry/README.md b/vendor/github.com/evalphobia/logrus_sentry/README.md deleted file mode 100644 index 9a65baafa65..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/README.md +++ /dev/null @@ -1,163 +0,0 @@ -Sentry Hook for Logrus :walrus: ----- - -[![GoDoc][1]][2] [![Release][5]][6] [![Build Status][7]][8] [![Coverage Status][9]][10] [![Go Report Card][13]][14] [![Code Climate][19]][20] [![BCH compliance][21]][22] - -[1]: https://godoc.org/github.com/evalphobia/logrus_sentry?status.svg -[2]: https://godoc.org/github.com/evalphobia/logrus_sentry -[4]: LICENSE.md -[5]: https://img.shields.io/github/release/evalphobia/logrus_sentry.svg -[6]: https://github.com/evalphobia/logrus_sentry/releases/latest -[7]: https://travis-ci.org/evalphobia/logrus_sentry.svg?branch=master -[8]: https://travis-ci.org/evalphobia/logrus_sentry -[9]: https://coveralls.io/repos/evalphobia/logrus_sentry/badge.svg?branch=master&service=github -[10]: https://coveralls.io/github/evalphobia/logrus_sentry?branch=master -[11]: https://codecov.io/github/evalphobia/logrus_sentry/coverage.svg?branch=master -[12]: https://codecov.io/github/evalphobia/logrus_sentry?branch=master -[13]: https://goreportcard.com/badge/github.com/evalphobia/logrus_sentry -[14]: https://goreportcard.com/report/github.com/evalphobia/logrus_sentry -[15]: https://img.shields.io/github/downloads/evalphobia/logrus_sentry/total.svg?maxAge=1800 -[16]: https://github.com/evalphobia/logrus_sentry/releases -[17]: https://img.shields.io/github/stars/evalphobia/logrus_sentry.svg -[18]: https://github.com/evalphobia/logrus_sentry/stargazers -[19]: https://codeclimate.com/github/evalphobia/logrus_sentry/badges/gpa.svg -[20]: https://codeclimate.com/github/evalphobia/logrus_sentry -[21]: https://bettercodehub.com/edge/badge/evalphobia/logrus_sentry?branch=master -[22]: https://bettercodehub.com/ - - -[Sentry](https://getsentry.com) provides both self-hosted and hosted -solutions for exception tracking. -Both client and server are -[open source](https://github.com/getsentry/sentry). 
- -## Usage - -Every sentry application defined on the server gets a different -[DSN](https://www.getsentry.com/docs/). In the example below replace -`YOUR_DSN` with the one created for your application. - -```go -import ( - "github.com/sirupsen/logrus" - "github.com/evalphobia/logrus_sentry" -) - -func main() { - log := logrus.New() - hook, err := logrus_sentry.NewSentryHook(YOUR_DSN, []logrus.Level{ - logrus.PanicLevel, - logrus.FatalLevel, - logrus.ErrorLevel, - }) - - if err == nil { - log.Hooks.Add(hook) - } -} -``` - -If you wish to initialize a SentryHook with tags, you can use the `NewWithTagsSentryHook` constructor to provide default tags: - -```go -tags := map[string]string{ - "site": "example.com", -} -levels := []logrus.Level{ - logrus.PanicLevel, - logrus.FatalLevel, - logrus.ErrorLevel, -} -hook, err := logrus_sentry.NewWithTagsSentryHook(YOUR_DSN, tags, levels) - -``` - -If you wish to initialize a SentryHook with an already initialized raven client, you can use -the `NewWithClientSentryHook` constructor: - -```go -import ( - "github.com/sirupsen/logrus" - "github.com/evalphobia/logrus_sentry" - "github.com/getsentry/raven-go" -) - -func main() { - log := logrus.New() - - client, err := raven.New(YOUR_DSN) - if err != nil { - log.Fatal(err) - } - - hook, err := logrus_sentry.NewWithClientSentryHook(client, []logrus.Level{ - logrus.PanicLevel, - logrus.FatalLevel, - logrus.ErrorLevel, - }) - - if err == nil { - log.Hooks.Add(hook) - } -} - -hook, err := NewWithClientSentryHook(client, []logrus.Level{ - logrus.ErrorLevel, -}) -``` - -## Special fields - -Some logrus fields have a special meaning in this hook, and they will be especially processed by Sentry. - - -| Field key | Description | -| ------------- | ------------- | -| `event_id` | Each logged event is identified by the `event_id`, which is hexadecimal string representing a UUID4 value. You can manually specify the identifier of a log event by supplying this field. The `event_id` string should be in one of the following UUID format: `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx` and `urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx`)| -| `user_name` | Name of the user who is in the context of the event | -| `user_email` | Email of the user who is in the context of the event | -| `user_id` | ID of the user who is in the context of the event | -| `user_ip` | IP of the user who is in the context of the event | -| `server_name` | Also known as hostname, it is the name of the server which is logging the event (hostname.example.com) | -| `tags` | `tags` are `raven.Tags` struct from `github.com/getsentry/raven-go` and override default tags data | -| `fingerprint` | `fingerprint` is an string array, that allows you to affect sentry's grouping of events as detailed in the [sentry documentation](https://docs.sentry.io/learn/rollups/#customize-grouping-with-fingerprints) | -| `logger` | `logger` is the part of the application which is logging the event. In go this usually means setting it to the name of the package. | -| `http_request` | `http_request` is the in-coming request(*http.Request). The detailed request data are sent to Sentry. | - -## Timeout - -`Timeout` is the time the sentry hook will wait for a response -from the sentry server. - -If this time elapses with no response from -the server an error will be returned. - -If `Timeout` is set to 0 the SentryHook will not wait for a reply -and will assume a correct delivery. 
- -The SentryHook has a default timeout of `100 milliseconds` when created -with a call to `NewSentryHook`. This can be changed by assigning a value to the `Timeout` field: - -```go -hook, _ := logrus_sentry.NewSentryHook(...) -hook.Timeout = 20*time.Second -``` - -## Enabling Stacktraces - -By default the hook will not send any stacktraces. However, this can be enabled -with: - -```go -hook, _ := logrus_sentry.NewSentryHook(...) -hook.StacktraceConfiguration.Enable = true -``` - -Subsequent calls to `logger.Error` and above will create a stacktrace. - -Other configuration options are: -- `StacktraceConfiguration.Level` the logrus level at which to start capturing stacktraces. -- `StacktraceConfiguration.Skip` how many stack frames to skip before stacktrace starts recording. -- `StacktraceConfiguration.Context` the number of lines to include around a stack frame for context. -- `StacktraceConfiguration.InAppPrefixes` the prefixes that will be matched against the stack frame to identify it as in_app -- `StacktraceConfiguration.IncludeErrorBreadcrumb` whether to create a breadcrumb with the full text of error diff --git a/vendor/github.com/evalphobia/logrus_sentry/data_field.go b/vendor/github.com/evalphobia/logrus_sentry/data_field.go deleted file mode 100644 index 37c89fbb66f..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/data_field.go +++ /dev/null @@ -1,137 +0,0 @@ -package logrus_sentry - -import ( - "net/http" - - "github.com/getsentry/raven-go" - "github.com/sirupsen/logrus" -) - -const ( - fieldEventID = "event_id" - fieldFingerprint = "fingerprint" - fieldLogger = "logger" - fieldServerName = "server_name" - fieldTags = "tags" - fieldHTTPRequest = "http_request" - fieldUser = "user" -) - -type dataField struct { - data logrus.Fields - omitList map[string]struct{} -} - -func newDataField(data logrus.Fields) *dataField { - return &dataField{ - data: data, - omitList: make(map[string]struct{}), - } -} - -func (d *dataField) len() int { - return len(d.data) -} - -func (d *dataField) isOmit(key string) bool { - _, ok := d.omitList[key] - return ok -} - -func (d *dataField) getLogger() (string, bool) { - if logger, ok := d.data[fieldLogger].(string); ok { - d.omitList[fieldLogger] = struct{}{} - return logger, true - } - return "", false -} - -func (d *dataField) getServerName() (string, bool) { - if serverName, ok := d.data[fieldServerName].(string); ok { - d.omitList[fieldServerName] = struct{}{} - return serverName, true - } - return "", false -} - -func (d *dataField) getTags() (raven.Tags, bool) { - if tags, ok := d.data[fieldTags].(raven.Tags); ok { - d.omitList[fieldTags] = struct{}{} - return tags, true - } - return nil, false -} - -func (d *dataField) getFingerprint() ([]string, bool) { - if fingerprint, ok := d.data[fieldFingerprint].([]string); ok { - d.omitList[fieldFingerprint] = struct{}{} - return fingerprint, true - } - return nil, false -} - -func (d *dataField) getError() (error, bool) { - if err, ok := d.data[logrus.ErrorKey].(error); ok { - d.omitList[logrus.ErrorKey] = struct{}{} - return err, true - } - return nil, false -} - -func (d *dataField) getHTTPRequest() (*raven.Http, bool) { - if req, ok := d.data[fieldHTTPRequest].(*http.Request); ok { - d.omitList[fieldHTTPRequest] = struct{}{} - return raven.NewHttp(req), true - } - if req, ok := d.data[fieldHTTPRequest].(*raven.Http); ok { - d.omitList[fieldHTTPRequest] = struct{}{} - return req, true - } - return nil, false -} - -func (d *dataField) getEventID() (string, bool) { - eventID, ok := 
d.data[fieldEventID].(string) - if !ok { - return "", false - } - - //verify eventID is 32 characters hexadecimal string (UUID4) - uuid := parseUUID(eventID) - if uuid == nil { - return "", false - } - - d.omitList[fieldEventID] = struct{}{} - return uuid.noDashString(), true -} - -func (d *dataField) getUser() (*raven.User, bool) { - data := d.data - if v, ok := data[fieldUser]; ok { - switch val := v.(type) { - case *raven.User: - d.omitList[fieldUser] = struct{}{} - return val, true - case raven.User: - d.omitList[fieldUser] = struct{}{} - return &val, true - } - } - - username, _ := data["user_name"].(string) - email, _ := data["user_email"].(string) - id, _ := data["user_id"].(string) - ip, _ := data["user_ip"].(string) - - if username == "" && email == "" && id == "" && ip == "" { - return nil, false - } - - return &raven.User{ - ID: id, - Username: username, - Email: email, - IP: ip, - }, true -} diff --git a/vendor/github.com/evalphobia/logrus_sentry/sentry.go b/vendor/github.com/evalphobia/logrus_sentry/sentry.go deleted file mode 100644 index 5d314b97e6e..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/sentry.go +++ /dev/null @@ -1,424 +0,0 @@ -package logrus_sentry - -import ( - "encoding/json" - "fmt" - "runtime" - "sync" - "time" - - raven "github.com/getsentry/raven-go" - "github.com/pkg/errors" - "github.com/sirupsen/logrus" -) - -var ( - severityMap = map[logrus.Level]raven.Severity{ - logrus.TraceLevel: raven.DEBUG, - logrus.DebugLevel: raven.DEBUG, - logrus.InfoLevel: raven.INFO, - logrus.WarnLevel: raven.WARNING, - logrus.ErrorLevel: raven.ERROR, - logrus.FatalLevel: raven.FATAL, - logrus.PanicLevel: raven.FATAL, - } -) - -// SentryHook delivers logs to a sentry server. -type SentryHook struct { - // Timeout sets the time to wait for a delivery error from the sentry server. - // If this is set to zero the server will not wait for any response and will - // consider the message correctly sent. - // - // This is ignored for asynchronous hooks. If you want to set a timeout when - // using an async hook (to bound the length of time that hook.Flush can take), - // you probably want to create your own raven.Client and set - // ravenClient.Transport.(*raven.HTTPTransport).Client.Timeout to set a - // timeout on the underlying HTTP request instead. - Timeout time.Duration - StacktraceConfiguration StackTraceConfiguration - - client *raven.Client - levels []logrus.Level - - serverName string - ignoreFields map[string]struct{} - extraFilters map[string]func(interface{}) interface{} - errorHandlers []func(entry *logrus.Entry, err error) - - asynchronous bool - - mu sync.RWMutex - wg sync.WaitGroup -} - -// The Stacktracer interface allows an error type to return a raven.Stacktrace. -type Stacktracer interface { - GetStacktrace() *raven.Stacktrace -} - -type causer interface { - Cause() error -} - -type pkgErrorStackTracer interface { - StackTrace() errors.StackTrace -} - -// StackTraceConfiguration allows for configuring stacktraces -type StackTraceConfiguration struct { - // whether stacktraces should be enabled - Enable bool - // the level at which to start capturing stacktraces - Level logrus.Level - // how many stack frames to skip before stacktrace starts recording - Skip int - // the number of lines to include around a stack frame for context - Context int - // the prefixes that will be matched against the stack frame. 
- // if the stack frame's package matches one of these prefixes - // sentry will identify the stack frame as "in_app" - InAppPrefixes []string - // whether sending exception type should be enabled. - SendExceptionType bool - // whether the exception type and message should be switched. - SwitchExceptionTypeAndMessage bool - // whether to include a breadcrumb with the full error stack - IncludeErrorBreadcrumb bool -} - -// NewSentryHook creates a hook to be added to an instance of logger -// and initializes the raven client. -// This method sets the timeout to 100 milliseconds. -func NewSentryHook(DSN string, levels []logrus.Level) (*SentryHook, error) { - client, err := raven.New(DSN) - if err != nil { - return nil, err - } - return NewWithClientSentryHook(client, levels) -} - -// NewWithTagsSentryHook creates a hook with tags to be added to an instance -// of logger and initializes the raven client. This method sets the timeout to -// 100 milliseconds. -func NewWithTagsSentryHook(DSN string, tags map[string]string, levels []logrus.Level) (*SentryHook, error) { - client, err := raven.NewWithTags(DSN, tags) - if err != nil { - return nil, err - } - return NewWithClientSentryHook(client, levels) -} - -// NewWithClientSentryHook creates a hook using an initialized raven client. -// This method sets the timeout to 100 milliseconds. -func NewWithClientSentryHook(client *raven.Client, levels []logrus.Level) (*SentryHook, error) { - return &SentryHook{ - Timeout: 100 * time.Millisecond, - StacktraceConfiguration: StackTraceConfiguration{ - Enable: false, - Level: logrus.ErrorLevel, - Skip: 6, - Context: 0, - InAppPrefixes: nil, - SendExceptionType: true, - }, - client: client, - levels: levels, - ignoreFields: make(map[string]struct{}), - extraFilters: make(map[string]func(interface{}) interface{}), - }, nil -} - -// NewAsyncSentryHook creates a hook same as NewSentryHook, but in asynchronous -// mode. -func NewAsyncSentryHook(DSN string, levels []logrus.Level) (*SentryHook, error) { - hook, err := NewSentryHook(DSN, levels) - return setAsync(hook), err -} - -// NewAsyncWithTagsSentryHook creates a hook same as NewWithTagsSentryHook, but -// in asynchronous mode. -func NewAsyncWithTagsSentryHook(DSN string, tags map[string]string, levels []logrus.Level) (*SentryHook, error) { - hook, err := NewWithTagsSentryHook(DSN, tags, levels) - return setAsync(hook), err -} - -// NewAsyncWithClientSentryHook creates a hook same as NewWithClientSentryHook, -// but in asynchronous mode. 
-func NewAsyncWithClientSentryHook(client *raven.Client, levels []logrus.Level) (*SentryHook, error) { - hook, err := NewWithClientSentryHook(client, levels) - return setAsync(hook), err -} - -func setAsync(hook *SentryHook) *SentryHook { - if hook == nil { - return nil - } - hook.asynchronous = true - return hook -} - -// Fire is called when an event should be sent to sentry -// Special fields that sentry uses to give more information to the server -// are extracted from entry.Data (if they are found) -// These fields are: error, logger, server_name, http_request, tags -func (hook *SentryHook) Fire(entry *logrus.Entry) error { - hook.mu.RLock() // Allow multiple go routines to log simultaneously - defer hook.mu.RUnlock() - - df := newDataField(entry.Data) - - err, hasError := df.getError() - var crumbs *Breadcrumbs - if hasError && hook.StacktraceConfiguration.IncludeErrorBreadcrumb { - crumbs = &Breadcrumbs{ - Values: []Value{{ - Timestamp: int64(time.Now().Unix()), - Type: "error", - Message: fmt.Sprintf("%+v", err), - }}, - } - } - - packet := raven.NewPacketWithExtra(entry.Message, nil, crumbs) - packet.Timestamp = raven.Timestamp(entry.Time) - packet.Level = severityMap[entry.Level] - packet.Platform = "go" - - // set special fields - if hook.serverName != "" { - packet.ServerName = hook.serverName - } - if logger, ok := df.getLogger(); ok { - packet.Logger = logger - } - if serverName, ok := df.getServerName(); ok { - packet.ServerName = serverName - } - if eventID, ok := df.getEventID(); ok { - packet.EventID = eventID - } - if tags, ok := df.getTags(); ok { - packet.Tags = tags - } - if fingerprint, ok := df.getFingerprint(); ok { - packet.Fingerprint = fingerprint - } - if req, ok := df.getHTTPRequest(); ok { - packet.Interfaces = append(packet.Interfaces, req) - } - if user, ok := df.getUser(); ok { - packet.Interfaces = append(packet.Interfaces, user) - } - - // set stacktrace data - stConfig := &hook.StacktraceConfiguration - if stConfig.Enable && entry.Level <= stConfig.Level { - if err, ok := df.getError(); ok { - var currentStacktrace *raven.Stacktrace - currentStacktrace = hook.findStacktrace(err) - if currentStacktrace == nil { - currentStacktrace = raven.NewStacktrace(stConfig.Skip, stConfig.Context, stConfig.InAppPrefixes) - } - cause := errors.Cause(err) - if cause == nil { - cause = err - } - exc := raven.NewException(cause, currentStacktrace) - if !stConfig.SendExceptionType { - exc.Type = "" - } - if stConfig.SwitchExceptionTypeAndMessage { - packet.Interfaces = append(packet.Interfaces, currentStacktrace) - packet.Culprit = exc.Type + ": " + currentStacktrace.Culprit() - } else { - packet.Interfaces = append(packet.Interfaces, exc) - packet.Culprit = err.Error() - } - } else { - currentStacktrace := raven.NewStacktrace(stConfig.Skip, stConfig.Context, stConfig.InAppPrefixes) - if currentStacktrace != nil { - packet.Interfaces = append(packet.Interfaces, currentStacktrace) - } - } - } else { - // set the culprit even when the stack trace is disabled, as long as we have an error - if err, ok := df.getError(); ok { - packet.Culprit = err.Error() - } - } - - // set other fields - dataExtra := hook.formatExtraData(df) - if packet.Extra == nil { - packet.Extra = dataExtra - } else { - for k, v := range dataExtra { - packet.Extra[k] = v - } - } - - _, errCh := hook.client.Capture(packet, nil) - - switch { - case hook.asynchronous: - // Our use of hook.mu guarantees that we are following the WaitGroup rule of - // not calling Add in parallel with Wait. 
- hook.wg.Add(1) - go func() { - if err := <-errCh; err != nil { - for _, handlerFn := range hook.errorHandlers { - handlerFn(entry, err) - } - } - hook.wg.Done() - }() - return nil - case hook.Timeout == 0: - return nil - default: - timeout := hook.Timeout - timeoutCh := time.After(timeout) - select { - case err := <-errCh: - for _, handlerFn := range hook.errorHandlers { - handlerFn(entry, err) - } - return err - case <-timeoutCh: - return fmt.Errorf("no response from sentry server in %s", timeout) - } - } -} - -// Flush waits for the log queue to empty. This function only does anything in -// asynchronous mode. -func (hook *SentryHook) Flush() { - if !hook.asynchronous { - return - } - hook.mu.Lock() // Claim exclusive access; any logging goroutines will block until the flush completes - defer hook.mu.Unlock() - - hook.wg.Wait() -} - -func (hook *SentryHook) findStacktrace(err error) *raven.Stacktrace { - var stacktrace *raven.Stacktrace - var stackErr errors.StackTrace - for err != nil { - // Find the earliest *raven.Stacktrace, or error.StackTrace - if tracer, ok := err.(Stacktracer); ok { - stacktrace = tracer.GetStacktrace() - stackErr = nil - } else if tracer, ok := err.(pkgErrorStackTracer); ok { - stacktrace = nil - stackErr = tracer.StackTrace() - } - if cause, ok := err.(causer); ok { - err = cause.Cause() - } else { - break - } - } - if stackErr != nil { - stacktrace = hook.convertStackTrace(stackErr) - } - return stacktrace -} - -// convertStackTrace converts an errors.StackTrace into a natively consumable -// *raven.Stacktrace -func (hook *SentryHook) convertStackTrace(st errors.StackTrace) *raven.Stacktrace { - stConfig := &hook.StacktraceConfiguration - stFrames := []errors.Frame(st) - frames := make([]*raven.StacktraceFrame, 0, len(stFrames)) - for i := range stFrames { - pc := uintptr(stFrames[i]) - fn := runtime.FuncForPC(pc) - file, line := fn.FileLine(pc) - frame := raven.NewStacktraceFrame(pc, fn.Name(), file, line, stConfig.Context, stConfig.InAppPrefixes) - if frame != nil { - frames = append(frames, frame) - } - } - - // Sentry wants the frames with the oldest first, so reverse them - for i, j := 0, len(frames)-1; i < j; i, j = i+1, j-1 { - frames[i], frames[j] = frames[j], frames[i] - } - return &raven.Stacktrace{Frames: frames} -} - -// Levels returns the available logging levels. -func (hook *SentryHook) Levels() []logrus.Level { - return hook.levels -} - -// AddIgnore adds field name to ignore. -func (hook *SentryHook) AddIgnore(name string) { - hook.ignoreFields[name] = struct{}{} -} - -// AddExtraFilter adds a custom filter function. -func (hook *SentryHook) AddExtraFilter(name string, fn func(interface{}) interface{}) { - hook.extraFilters[name] = fn -} - -// AddErrorHandler adds a error handler function used when Sentry returns error. -func (hook *SentryHook) AddErrorHandler(fn func(entry *logrus.Entry, err error)) { - hook.errorHandlers = append(hook.errorHandlers, fn) -} - -func (hook *SentryHook) formatExtraData(df *dataField) (result map[string]interface{}) { - // create a map for passing to Sentry's extra data - result = make(map[string]interface{}, df.len()) - for k, v := range df.data { - if df.isOmit(k) { - continue // skip already used special fields - } - if _, ok := hook.ignoreFields[k]; ok { - continue - } - - if fn, ok := hook.extraFilters[k]; ok { - v = fn(v) // apply custom filter - } else { - v = formatData(v) // use default formatter - } - result[k] = v - } - return result -} - -// formatData returns value as a suitable format. 
-func formatData(value interface{}) (formatted interface{}) { - switch value := value.(type) { - case json.Marshaler: - return value - case error: - return value.Error() - case fmt.Stringer: - return value.String() - default: - return value - } -} - -// utility classes for breadcrumb support -type Breadcrumbs struct { - Values []Value `json:"values"` -} - -type Value struct { - Timestamp int64 `json:"timestamp"` - Type string `json:"type"` - Message string `json:"message"` - Category string `json:"category"` - Level string `json:"string"` - Data interface{} `json:"data"` -} - -func (b *Breadcrumbs) Class() string { - return "breadcrumbs" -} diff --git a/vendor/github.com/evalphobia/logrus_sentry/sentry_setter.go b/vendor/github.com/evalphobia/logrus_sentry/sentry_setter.go deleted file mode 100644 index 1912e53f8a9..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/sentry_setter.go +++ /dev/null @@ -1,55 +0,0 @@ -package logrus_sentry - -import ( - "github.com/getsentry/raven-go" -) - -// SetDefaultLoggerName sets default logger name tag. -func (hook *SentryHook) SetDefaultLoggerName(name string) { - hook.client.SetDefaultLoggerName(name) -} - -// SetEnvironment sets environment tag. -func (hook *SentryHook) SetEnvironment(environment string) { - hook.client.SetEnvironment(environment) -} - -// SetHttpContext sets http client. -func (hook *SentryHook) SetHttpContext(h *raven.Http) { - hook.client.SetHttpContext(h) -} - -// SetIgnoreErrors sets ignoreErrorsRegexp. -func (hook *SentryHook) SetIgnoreErrors(errs ...string) error { - return hook.client.SetIgnoreErrors(errs) -} - -// SetIncludePaths sets includePaths. -func (hook *SentryHook) SetIncludePaths(p []string) { - hook.client.SetIncludePaths(p) -} - -// SetRelease sets release tag. -func (hook *SentryHook) SetRelease(release string) { - hook.client.SetRelease(release) -} - -// SetSampleRate sets sampling rate. -func (hook *SentryHook) SetSampleRate(rate float32) error { - return hook.client.SetSampleRate(rate) -} - -// SetTagsContext sets tags. -func (hook *SentryHook) SetTagsContext(t map[string]string) { - hook.client.SetTagsContext(t) -} - -// SetUserContext sets user. -func (hook *SentryHook) SetUserContext(u *raven.User) { - hook.client.SetUserContext(u) -} - -// SetServerName sets server_name tag. -func (hook *SentryHook) SetServerName(serverName string) { - hook.serverName = serverName -} diff --git a/vendor/github.com/evalphobia/logrus_sentry/utils.go b/vendor/github.com/evalphobia/logrus_sentry/utils.go deleted file mode 100644 index 8b4a9095d3b..00000000000 --- a/vendor/github.com/evalphobia/logrus_sentry/utils.go +++ /dev/null @@ -1,135 +0,0 @@ -package logrus_sentry - -import ( - "fmt" - "strings" -) - -/* -Copyright (c) 2009,2014 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -*/ - -// A UUID is a 128 bit (16 byte) Universal Unique IDentifier as defined in RFC -// 4122. -type uuid []byte - -// parseUUID decodes s into a UUID or returns nil. Both the UUID form of -// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx and -// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx and -// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx are decoded. -func parseUUID(s string) uuid { - //If it is in no dash format "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - if len(s) == 32 { - uuid := make([]byte, 16) - for i, x := range []int{ - 0, 2, 4, 6, 8, 10, - 12, 14, 16, 18, 20, - 22, 24, 26, 28, 30} { - if v, ok := xtob(s[x:]); !ok { - return nil - } else { - uuid[i] = v - } - } - return uuid - } - - if len(s) == 36+9 { - if strings.ToLower(s[:9]) != "urn:uuid:" { - return nil - } - s = s[9:] - } else if len(s) != 36 { - return nil - } - if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' { - return nil - } - uuid := make([]byte, 16) - for i, x := range []int{ - 0, 2, 4, 6, - 9, 11, - 14, 16, - 19, 21, - 24, 26, 28, 30, 32, 34} { - if v, ok := xtob(s[x:]); !ok { - return nil - } else { - uuid[i] = v - } - } - return uuid -} - -// String returns the string form of uuid, xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -// , or "" if uuid is invalid. -func (uuid uuid) string() string { - if uuid == nil || len(uuid) != 16 { - return "" - } - b := []byte(uuid) - return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x", - b[:4], b[4:6], b[6:8], b[8:10], b[10:]) -} - -func (uuid uuid) noDashString() string { - if uuid == nil || len(uuid) != 16 { - return "" - } - b := []byte(uuid) - return fmt.Sprintf("%08x%04x%04x%04x%012x", - b[:4], b[4:6], b[6:8], b[8:10], b[10:]) -} - -// xvalues returns the value of a byte as a hexadecimal digit or 255. 
-var xvalues = []byte{ - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255, - 255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, -} - -// xtob converts the the first two hex bytes of x into a byte. -func xtob(x string) (byte, bool) { - b1 := xvalues[x[0]] - b2 := xvalues[x[1]] - return (b1 << 4) | b2, b1 != 255 && b2 != 255 -} diff --git a/vendor/github.com/facebookgo/clock/LICENSE b/vendor/github.com/facebookgo/clock/LICENSE deleted file mode 100644 index ce212cb1cee..00000000000 --- a/vendor/github.com/facebookgo/clock/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Ben Johnson - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/facebookgo/clock/README.md b/vendor/github.com/facebookgo/clock/README.md deleted file mode 100644 index 5d4f4fe72e7..00000000000 --- a/vendor/github.com/facebookgo/clock/README.md +++ /dev/null @@ -1,104 +0,0 @@ -clock [![Build Status](https://drone.io/github.com/benbjohnson/clock/status.png)](https://drone.io/github.com/benbjohnson/clock/latest) [![Coverage Status](https://coveralls.io/repos/benbjohnson/clock/badge.png?branch=master)](https://coveralls.io/r/benbjohnson/clock?branch=master) [![GoDoc](https://godoc.org/github.com/benbjohnson/clock?status.png)](https://godoc.org/github.com/benbjohnson/clock) ![Project status](http://img.shields.io/status/experimental.png?color=red) -===== - -Clock is a small library for mocking time in Go. It provides an interface -around the standard library's [`time`][time] package so that the application -can use the realtime clock while tests can use the mock clock. - -[time]: http://golang.org/pkg/time/ - - -## Usage - -### Realtime Clock - -Your application can maintain a `Clock` variable that will allow realtime and -mock clocks to be interchangable. For example, if you had an `Application` type: - -```go -import "github.com/benbjohnson/clock" - -type Application struct { - Clock clock.Clock -} -``` - -You could initialize it to use the realtime clock like this: - -```go -var app Application -app.Clock = clock.New() -... -``` - -Then all timers and time-related functionality should be performed from the -`Clock` variable. - - -### Mocking time - -In your tests, you will want to use a `Mock` clock: - -```go -import ( - "testing" - - "github.com/benbjohnson/clock" -) - -func TestApplication_DoSomething(t *testing.T) { - mock := clock.NewMock() - app := Application{Clock: mock} - ... -} -``` - -Now that you've initialized your application to use the mock clock, you can -adjust the time programmatically. The mock clock always starts from the Unix -epoch (midnight, Jan 1, 1970 UTC). - - -### Controlling time - -The mock clock provides the same functions that the standard library's `time` -package provides. For example, to find the current time, you use the `Now()` -function: - -```go -mock := clock.NewMock() - -// Find the current time. -mock.Now().UTC() // 1970-01-01 00:00:00 +0000 UTC - -// Move the clock forward. -mock.Add(2 * time.Hour) - -// Check the time again. It's 2 hours later! -mock.Now().UTC() // 1970-01-01 02:00:00 +0000 UTC -``` - -Timers and Tickers are also controlled by this same mock clock. They will only -execute when the clock is moved forward: - -``` -mock := clock.NewMock() -count := 0 - -// Kick off a timer to increment every 1 mock second. -go func() { - ticker := clock.Ticker(1 * time.Second) - for { - <-ticker.C - count++ - } -}() -runtime.Gosched() - -// Move the clock forward 10 second. -mock.Add(10 * time.Second) - -// This prints 10. -fmt.Println(count) -``` - - diff --git a/vendor/github.com/facebookgo/clock/clock.go b/vendor/github.com/facebookgo/clock/clock.go deleted file mode 100644 index bca1a7ba8b3..00000000000 --- a/vendor/github.com/facebookgo/clock/clock.go +++ /dev/null @@ -1,363 +0,0 @@ -package clock - -import ( - "runtime" - "sort" - "sync" - "time" -) - -// Clock represents an interface to the functions in the standard library time -// package. Two implementations are available in the clock package. The first -// is a real-time clock which simply wraps the time package's functions. 
The -// second is a mock clock which will only make forward progress when -// programmatically adjusted. -type Clock interface { - After(d time.Duration) <-chan time.Time - AfterFunc(d time.Duration, f func()) *Timer - Now() time.Time - Sleep(d time.Duration) - Tick(d time.Duration) <-chan time.Time - Ticker(d time.Duration) *Ticker - Timer(d time.Duration) *Timer -} - -// New returns an instance of a real-time clock. -func New() Clock { - return &clock{} -} - -// clock implements a real-time clock by simply wrapping the time package functions. -type clock struct{} - -func (c *clock) After(d time.Duration) <-chan time.Time { return time.After(d) } - -func (c *clock) AfterFunc(d time.Duration, f func()) *Timer { - return &Timer{timer: time.AfterFunc(d, f)} -} - -func (c *clock) Now() time.Time { return time.Now() } - -func (c *clock) Sleep(d time.Duration) { time.Sleep(d) } - -func (c *clock) Tick(d time.Duration) <-chan time.Time { return time.Tick(d) } - -func (c *clock) Ticker(d time.Duration) *Ticker { - t := time.NewTicker(d) - return &Ticker{C: t.C, ticker: t} -} - -func (c *clock) Timer(d time.Duration) *Timer { - t := time.NewTimer(d) - return &Timer{C: t.C, timer: t} -} - -// Mock represents a mock clock that only moves forward programmically. -// It can be preferable to a real-time clock when testing time-based functionality. -type Mock struct { - mu sync.Mutex - now time.Time // current time - timers clockTimers // tickers & timers - - calls Calls - waiting []waiting - callsMutex sync.Mutex -} - -// NewMock returns an instance of a mock clock. -// The current time of the mock clock on initialization is the Unix epoch. -func NewMock() *Mock { - return &Mock{now: time.Unix(0, 0)} -} - -// Add moves the current time of the mock clock forward by the duration. -// This should only be called from a single goroutine at a time. -func (m *Mock) Add(d time.Duration) { - // Calculate the final current time. - t := m.now.Add(d) - - // Continue to execute timers until there are no more before the new time. - for { - if !m.runNextTimer(t) { - break - } - } - - // Ensure that we end with the new time. - m.mu.Lock() - m.now = t - m.mu.Unlock() - - // Give a small buffer to make sure the other goroutines get handled. - gosched() -} - -// runNextTimer executes the next timer in chronological order and moves the -// current time to the timer's next tick time. The next time is not executed if -// it's next time if after the max time. Returns true if a timer is executed. -func (m *Mock) runNextTimer(max time.Time) bool { - m.mu.Lock() - - // Sort timers by time. - sort.Sort(m.timers) - - // If we have no more timers then exit. - if len(m.timers) == 0 { - m.mu.Unlock() - return false - } - - // Retrieve next timer. Exit if next tick is after new time. - t := m.timers[0] - if t.Next().After(max) { - m.mu.Unlock() - return false - } - - // Move "now" forward and unlock clock. - m.now = t.Next() - m.mu.Unlock() - - // Execute timer. - t.Tick(m.now) - return true -} - -// After waits for the duration to elapse and then sends the current time on the returned channel. -func (m *Mock) After(d time.Duration) <-chan time.Time { - defer m.inc(&m.calls.After) - return m.Timer(d).C -} - -// AfterFunc waits for the duration to elapse and then executes a function. -// A Timer is returned that can be stopped. -func (m *Mock) AfterFunc(d time.Duration, f func()) *Timer { - defer m.inc(&m.calls.AfterFunc) - t := m.Timer(d) - t.C = nil - t.fn = f - return t -} - -// Now returns the current wall time on the mock clock. 
-func (m *Mock) Now() time.Time { - defer m.inc(&m.calls.Now) - m.mu.Lock() - defer m.mu.Unlock() - return m.now -} - -// Sleep pauses the goroutine for the given duration on the mock clock. -// The clock must be moved forward in a separate goroutine. -func (m *Mock) Sleep(d time.Duration) { - defer m.inc(&m.calls.Sleep) - <-m.After(d) -} - -// Tick is a convenience function for Ticker(). -// It will return a ticker channel that cannot be stopped. -func (m *Mock) Tick(d time.Duration) <-chan time.Time { - defer m.inc(&m.calls.Tick) - return m.Ticker(d).C -} - -// Ticker creates a new instance of Ticker. -func (m *Mock) Ticker(d time.Duration) *Ticker { - defer m.inc(&m.calls.Ticker) - m.mu.Lock() - defer m.mu.Unlock() - ch := make(chan time.Time) - t := &Ticker{ - C: ch, - c: ch, - mock: m, - d: d, - next: m.now.Add(d), - } - m.timers = append(m.timers, (*internalTicker)(t)) - return t -} - -// Timer creates a new instance of Timer. -func (m *Mock) Timer(d time.Duration) *Timer { - defer m.inc(&m.calls.Timer) - m.mu.Lock() - defer m.mu.Unlock() - ch := make(chan time.Time) - t := &Timer{ - C: ch, - c: ch, - mock: m, - next: m.now.Add(d), - } - m.timers = append(m.timers, (*internalTimer)(t)) - return t -} - -func (m *Mock) removeClockTimer(t clockTimer) { - m.mu.Lock() - defer m.mu.Unlock() - for i, timer := range m.timers { - if timer == t { - copy(m.timers[i:], m.timers[i+1:]) - m.timers[len(m.timers)-1] = nil - m.timers = m.timers[:len(m.timers)-1] - break - } - } - sort.Sort(m.timers) -} - -func (m *Mock) inc(addr *uint32) { - m.callsMutex.Lock() - defer m.callsMutex.Unlock() - *addr++ - var newWaiting []waiting - for _, w := range m.waiting { - if m.calls.atLeast(w.expected) { - close(w.done) - continue - } - newWaiting = append(newWaiting, w) - } - m.waiting = newWaiting -} - -// Wait waits for at least the relevant calls before returning. The expected -// Calls are always over the lifetime of the Mock. Values in the Calls struct -// are used as the minimum number of calls, this allows you to wait for only -// the calls you care about. -func (m *Mock) Wait(s Calls) { - m.callsMutex.Lock() - if m.calls.atLeast(s) { - m.callsMutex.Unlock() - return - } - done := make(chan struct{}) - m.waiting = append(m.waiting, waiting{expected: s, done: done}) - m.callsMutex.Unlock() - <-done -} - -// clockTimer represents an object with an associated start time. -type clockTimer interface { - Next() time.Time - Tick(time.Time) -} - -// clockTimers represents a list of sortable timers. -type clockTimers []clockTimer - -func (a clockTimers) Len() int { return len(a) } -func (a clockTimers) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a clockTimers) Less(i, j int) bool { return a[i].Next().Before(a[j].Next()) } - -// Timer represents a single event. -// The current time will be sent on C, unless the timer was created by AfterFunc. -type Timer struct { - C <-chan time.Time - c chan time.Time - timer *time.Timer // realtime impl, if set - next time.Time // next tick time - mock *Mock // mock clock, if set - fn func() // AfterFunc function, if set -} - -// Stop turns off the ticker. 
-func (t *Timer) Stop() { - if t.timer != nil { - t.timer.Stop() - } else { - t.mock.removeClockTimer((*internalTimer)(t)) - } -} - -type internalTimer Timer - -func (t *internalTimer) Next() time.Time { return t.next } -func (t *internalTimer) Tick(now time.Time) { - if t.fn != nil { - t.fn() - } else { - t.c <- now - } - t.mock.removeClockTimer((*internalTimer)(t)) - gosched() -} - -// Ticker holds a channel that receives "ticks" at regular intervals. -type Ticker struct { - C <-chan time.Time - c chan time.Time - ticker *time.Ticker // realtime impl, if set - next time.Time // next tick time - mock *Mock // mock clock, if set - d time.Duration // time between ticks -} - -// Stop turns off the ticker. -func (t *Ticker) Stop() { - if t.ticker != nil { - t.ticker.Stop() - } else { - t.mock.removeClockTimer((*internalTicker)(t)) - } -} - -type internalTicker Ticker - -func (t *internalTicker) Next() time.Time { return t.next } -func (t *internalTicker) Tick(now time.Time) { - select { - case t.c <- now: - case <-time.After(1 * time.Millisecond): - } - t.next = now.Add(t.d) - gosched() -} - -// Sleep momentarily so that other goroutines can process. -func gosched() { runtime.Gosched() } - -// Calls keeps track of the count of calls for each of the methods on the Clock -// interface. -type Calls struct { - After uint32 - AfterFunc uint32 - Now uint32 - Sleep uint32 - Tick uint32 - Ticker uint32 - Timer uint32 -} - -// atLeast returns true if at least the number of calls in o have been made. -func (c Calls) atLeast(o Calls) bool { - if c.After < o.After { - return false - } - if c.AfterFunc < o.AfterFunc { - return false - } - if c.Now < o.Now { - return false - } - if c.Sleep < o.Sleep { - return false - } - if c.Tick < o.Tick { - return false - } - if c.Ticker < o.Ticker { - return false - } - if c.Timer < o.Timer { - return false - } - return true -} - -type waiting struct { - expected Calls - done chan struct{} -} diff --git a/vendor/github.com/franela/goreq/.gitignore b/vendor/github.com/franela/goreq/.gitignore deleted file mode 100644 index 131fc16fbbc..00000000000 --- a/vendor/github.com/franela/goreq/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe - -src diff --git a/vendor/github.com/franela/goreq/.travis.yml b/vendor/github.com/franela/goreq/.travis.yml deleted file mode 100644 index 916dd6889ef..00000000000 --- a/vendor/github.com/franela/goreq/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: go -go: - - 1.9.2 - - tip -notifications: - email: - - ionathan@gmail.com - - marcosnils@gmail.com diff --git a/vendor/github.com/franela/goreq/LICENSE b/vendor/github.com/franela/goreq/LICENSE deleted file mode 100644 index 068dee1edcc..00000000000 --- a/vendor/github.com/franela/goreq/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Jonathan Leibiusky and Marcos Lilljedahl - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the 
following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/franela/goreq/Makefile b/vendor/github.com/franela/goreq/Makefile deleted file mode 100644 index 0f04d6572df..00000000000 --- a/vendor/github.com/franela/goreq/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -test: - go get -v -d -t ./... - go test -v diff --git a/vendor/github.com/franela/goreq/README.md b/vendor/github.com/franela/goreq/README.md deleted file mode 100644 index c202f32641b..00000000000 --- a/vendor/github.com/franela/goreq/README.md +++ /dev/null @@ -1,444 +0,0 @@ -[![Build Status](https://img.shields.io/travis/franela/goreq/master.svg)](https://travis-ci.org/franela/goreq) -[![GoDoc](https://godoc.org/github.com/franela/goreq?status.svg)](https://godoc.org/github.com/franela/goreq) - -GoReq -======= - -Simple and sane HTTP request library for Go language. - - - -**Table of Contents** - -- [Why GoReq?](#user-content-why-goreq) -- [How do I install it?](#user-content-how-do-i-install-it) -- [What can I do with it?](#user-content-what-can-i-do-with-it) - - [Making requests with different methods](#user-content-making-requests-with-different-methods) - - [GET](#user-content-get) - - [Tags](#user-content-tags) - - [POST](#user-content-post) - - [Sending payloads in the Body](#user-content-sending-payloads-in-the-body) - - [Specifiying request headers](#user-content-specifiying-request-headers) - - [Sending Cookies](#cookie-support) - - [Setting timeouts](#user-content-setting-timeouts) - - [Using the Response and Error](#user-content-using-the-response-and-error) - - [Receiving JSON](#user-content-receiving-json) - - [Sending/Receiving Compressed Payloads](#user-content-sendingreceiving-compressed-payloads) - - [Using gzip compression:](#user-content-using-gzip-compression) - - [Using deflate compression:](#user-content-using-deflate-compression) - - [Using compressed responses:](#user-content-using-compressed-responses) - - [Proxy](#proxy) - - [Debugging requests](#debug) - - [Getting raw Request & Response](#getting-raw-request--response) - - [TODO:](#user-content-todo) - - - -Why GoReq? -========== - -Go has very nice native libraries that allows you to do lots of cool things. But sometimes those libraries are too low level, which means that to do a simple thing, like an HTTP Request, it takes some time. And if you want to do something as simple as adding a timeout to a request, you will end up writing several lines of code. - -This is why we think GoReq is useful. Because you can do all your HTTP requests in a very simple and comprehensive way, while enabling you to do more advanced stuff by giving you access to the native API. - -How do I install it? -==================== - -```bash -go get github.com/franela/goreq -``` - -What can I do with it? -====================== - -## Making requests with different methods - -#### GET -```go -res, err := goreq.Request{ Uri: "http://www.google.com" }.Do() -``` - -GoReq default method is GET. 
- -You can also set value to GET method easily - -```go -type Item struct { - Limit int - Skip int - Fields string -} - -item := Item { - Limit: 3, - Skip: 5, - Fields: "Value", -} - -res, err := goreq.Request{ - Uri: "http://localhost:3000/", - QueryString: item, -}.Do() -``` -The sample above will send `http://localhost:3000/?limit=3&skip=5&fields=Value` - -Alternatively the `url` tag can be used in struct fields to customize encoding properties - -```go -type Item struct { - TheLimit int `url:"the_limit"` - TheSkip string `url:"the_skip,omitempty"` - TheFields string `url:"-"` -} - -item := Item { - TheLimit: 3, - TheSkip: "", - TheFields: "Value", -} - -res, err := goreq.Request{ - Uri: "http://localhost:3000/", - QueryString: item, -}.Do() -``` -The sample above will send `http://localhost:3000/?the_limit=3` - - -QueryString also support url.Values - -```go -item := url.Values{} -item.Set("Limit", 3) -item.Add("Field", "somefield") -item.Add("Field", "someotherfield") - -res, err := goreq.Request{ - Uri: "http://localhost:3000/", - QueryString: item, -}.Do() -``` - -The sample above will send `http://localhost:3000/?limit=3&field=somefield&field=someotherfield` - -### Tags - -Struct field `url` tag is mainly used as the request parameter name. -Tags can be comma separated multiple values, 1st value is for naming and rest has special meanings. - -- special tag for 1st value - - `-`: value is ignored if set this - -- special tag for rest 2nd value - - `omitempty`: zero-value is ignored if set this - - `squash`: the fields of embedded struct is used for parameter - -#### Tag Examples - -```go -type Place struct { - Country string `url:"country"` - City string `url:"city"` - ZipCode string `url:"zipcode,omitempty"` -} - -type Person struct { - Place `url:",squash"` - - FirstName string `url:"first_name"` - LastName string `url:"last_name"` - Age string `url:"age,omitempty"` - Password string `url:"-"` -} - -johnbull := Person{ - Place: Place{ // squash the embedded struct value - Country: "UK", - City: "London", - ZipCode: "SW1", - }, - FirstName: "John", - LastName: "Doe", - Age: "35", - Password: "my-secret", // ignored for parameter -} - -goreq.Request{ - Uri: "http://localhost/", - QueryString: johnbull, -}.Do() -// => `http://localhost/?first_name=John&last_name=Doe&age=35&country=UK&city=London&zip_code=SW1` - - -// age and zipcode will be ignored because of `omitempty` -// but firstname isn't. -samurai := Person{ - Place: Place{ // squash the embedded struct value - Country: "Japan", - City: "Tokyo", - }, - LastName: "Yagyu", -} - -goreq.Request{ - Uri: "http://localhost/", - QueryString: samurai, -}.Do() -// => `http://localhost/?first_name=&last_name=yagyu&country=Japan&city=Tokyo` -``` - - -#### POST - -```go -res, err := goreq.Request{ Method: "POST", Uri: "http://www.google.com" }.Do() -``` - -## Sending payloads in the Body - -You can send ```string```, ```Reader``` or ```interface{}``` in the body. The first two will be sent as text. The last one will be marshalled to JSON, if possible. - -```go -type Item struct { - Id int - Name string -} - -item := Item{ Id: 1111, Name: "foobar" } - -res, err := goreq.Request{ - Method: "POST", - Uri: "http://www.google.com", - Body: item, -}.Do() -``` - -## Specifiying request headers - -We think that most of the times the request headers that you use are: ```Host```, ```Content-Type```, ```Accept``` and ```User-Agent```. This is why we decided to make it very easy to set these headers. 
- -```go -res, err := goreq.Request{ - Uri: "http://www.google.com", - Host: "foobar.com", - Accept: "application/json", - ContentType: "application/json", - UserAgent: "goreq", -}.Do() -``` - -But sometimes you need to set other headers. You can still do it. - -```go -req := goreq.Request{ Uri: "http://www.google.com" } - -req.AddHeader("X-Custom", "somevalue") - -req.Do() -``` - -Alternatively you can use the `WithHeader` function to keep the syntax short - -```go -res, err = goreq.Request{ Uri: "http://www.google.com" }.WithHeader("X-Custom", "somevalue").Do() -``` - -## Cookie support - -Cookies can be either set at the request level by sending a [CookieJar](http://golang.org/pkg/net/http/cookiejar/) in the `CookieJar` request field -or you can use goreq's one-liner WithCookie method as shown below - -```go -res, err := goreq.Request{ - Uri: "http://www.google.com", -}. -WithCookie(&http.Cookie{Name: "c1", Value: "v1"}). -Do() -``` - -## Setting timeouts - -GoReq supports 2 kind of timeouts. A general connection timeout and a request specific one. By default the connection timeout is of 1 second. There is no default for request timeout, which means it will wait forever. - -You can change the connection timeout doing: - -```go -goreq.SetConnectTimeout(100 * time.Millisecond) -``` - -And specify the request timeout doing: - -```go -res, err := goreq.Request{ - Uri: "http://www.google.com", - Timeout: 500 * time.Millisecond, -}.Do() -``` - -## Using the Response and Error - -GoReq will always return 2 values: a ```Response``` and an ```Error```. -If ```Error``` is not ```nil``` it means that an error happened while doing the request and you shouldn't use the ```Response``` in any way. -You can check what happened by getting the error message: - -```go -fmt.Println(err.Error()) -``` -And to make it easy to know if it was a timeout error, you can ask the error or return it: - -```go -if serr, ok := err.(*goreq.Error); ok { - if serr.Timeout() { - ... - } -} -return err -``` - -If you don't get an error, you can safely use the ```Response```. - -```go -res.Uri // return final URL location of the response (fulfilled after redirect was made) -res.StatusCode // return the status code of the response -res.Body // gives you access to the body -res.Body.ToString() // will return the body as a string -res.Header.Get("Content-Type") // gives you access to all the response headers -``` -Remember that you should **always** close `res.Body` if it's not `nil` - -## Receiving JSON - -GoReq will help you to receive and unmarshal JSON. - -```go -type Item struct { - Id int - Name string -} - -var item Item - -res.Body.FromJsonTo(&item) -``` - -## Sending/Receiving Compressed Payloads -GoReq supports gzip, deflate and zlib compression of requests' body and transparent decompression of responses provided they have a correct `Content-Encoding` header. 
- -##### Using gzip compression: -```go -res, err := goreq.Request{ - Method: "POST", - Uri: "http://www.google.com", - Body: item, - Compression: goreq.Gzip(), -}.Do() -``` -##### Using deflate/zlib compression: -```go -res, err := goreq.Request{ - Method: "POST", - Uri: "http://www.google.com", - Body: item, - Compression: goreq.Deflate(), -}.Do() -``` -##### Using compressed responses: -If servers replies a correct and matching `Content-Encoding` header (gzip requires `Content-Encoding: gzip` and deflate `Content-Encoding: deflate`) goreq transparently decompresses the response so the previous example should always work: -```go -type Item struct { - Id int - Name string -} -res, err := goreq.Request{ - Method: "POST", - Uri: "http://www.google.com", - Body: item, - Compression: goreq.Gzip(), -}.Do() -var item Item -res.Body.FromJsonTo(&item) -``` -If no `Content-Encoding` header is replied by the server GoReq will return the crude response. - -## Proxy -If you need to use a proxy for your requests GoReq supports the standard `http_proxy` env variable as well as manually setting the proxy for each request - -```go -res, err := goreq.Request{ - Method: "GET", - Proxy: "http://myproxy:myproxyport", - Uri: "http://www.google.com", -}.Do() -``` - -### Proxy basic auth is also supported - -```go -res, err := goreq.Request{ - Method: "GET", - Proxy: "http://user:pass@myproxy:myproxyport", - Uri: "http://www.google.com", -}.Do() -``` - -## Debug -If you need to debug your http requests, it can print the http request detail. - -```go -res, err := goreq.Request{ - Method: "GET", - Uri: "http://www.google.com", - Compression: goreq.Gzip(), - ShowDebug: true, -}.Do() -fmt.Println(res, err) -``` - -and it will print the log: -``` -GET / HTTP/1.1 -Host: www.google.com -Accept: -Accept-Encoding: gzip -Content-Encoding: gzip -Content-Type: -``` - - -### Getting raw Request & Response - -To get the Request: - -```go -req := goreq.Request{ - Host: "foobar.com", -} - -//req.Request will return a new instance of an http.Request so you can safely use it for something else -request, _ := req.NewRequest() - -``` - - -To get the Response: - -```go -res, err := goreq.Request{ - Method: "GET", - Uri: "http://www.google.com", - Compression: goreq.Gzip(), - ShowDebug: true, -}.Do() - -// res.Response will contain the original http.Response structure -fmt.Println(res.Response, err) -``` - - - - -TODO: ------ - -We do have a couple of [issues](https://github.com/franela/goreq/issues) pending we'll be addressing soon. But feel free to -contribute and send us PRs (with tests please :smile:). 
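The goreq README removed above documents each option in isolation. For reference, the pieces compose as in the minimal sketch below, which combines a query-string struct, a custom header, a request timeout and JSON decoding. It assumes the vendored `github.com/franela/goreq` API as it stood before this removal; the endpoint, struct fields and timeout value are purely illustrative.

```go
package main

import (
	"fmt"
	"time"

	"github.com/franela/goreq"
)

// SearchQuery is encoded into the query string via the `url` struct tags
// described in the Tags section of the removed README.
type SearchQuery struct {
	Limit int `url:"limit"`
	Skip  int `url:"skip,omitempty"` // zero value, so omitted from the query string
}

type Item struct {
	Id   int
	Name string
}

func main() {
	// Hypothetical endpoint; all values here are illustrative.
	res, err := goreq.Request{
		Uri:         "http://localhost:3000/items",
		QueryString: SearchQuery{Limit: 3},
		Accept:      "application/json",
		Timeout:     500 * time.Millisecond,
	}.WithHeader("X-Custom", "somevalue").Do()
	if err != nil {
		// Timeout errors can be detected through *goreq.Error, as shown above.
		if serr, ok := err.(*goreq.Error); ok && serr.Timeout() {
			fmt.Println("request timed out")
		}
		return
	}
	defer res.Body.Close() // the removed docs stress closing a non-nil Body

	var item Item
	if err := res.Body.FromJsonTo(&item); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println(item.Name)
}
```

As the removed documentation notes, `res.Body` must always be closed when it is non-nil, which the sketch handles with `defer`.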
diff --git a/vendor/github.com/franela/goreq/goreq.go b/vendor/github.com/franela/goreq/goreq.go deleted file mode 100644 index 801c7d307e5..00000000000 --- a/vendor/github.com/franela/goreq/goreq.go +++ /dev/null @@ -1,515 +0,0 @@ -package goreq - -import ( - "bufio" - "bytes" - "compress/gzip" - "compress/zlib" - "crypto/tls" - "encoding/json" - "errors" - "fmt" - "io" - "io/ioutil" - "log" - "net" - "net/http" - "net/http/httputil" - "net/url" - "reflect" - "strings" - "time" -) - -type itimeout interface { - Timeout() bool -} -type Request struct { - headers []headerTuple - cookies []*http.Cookie - Method string - Uri string - Body interface{} - QueryString interface{} - Timeout time.Duration - ContentType string - Accept string - Host string - UserAgent string - Insecure bool - MaxRedirects int - RedirectHeaders bool - Proxy string - proxyConnectHeaders []headerTuple - Compression *compression - BasicAuthUsername string - BasicAuthPassword string - CookieJar http.CookieJar - ShowDebug bool - OnBeforeRequest func(goreq *Request, httpreq *http.Request) -} - -type compression struct { - writer func(buffer io.Writer) (io.WriteCloser, error) - reader func(buffer io.Reader) (io.ReadCloser, error) - ContentEncoding string -} - -type Response struct { - *http.Response - Uri string - Body *Body - req *http.Request -} - -func (r Response) CancelRequest() { - cancelRequest(DefaultTransport, r.req) - -} - -func cancelRequest(transport interface{}, r *http.Request) { - if tp, ok := transport.(transportRequestCanceler); ok { - tp.CancelRequest(r) - } -} - -type headerTuple struct { - name string - value string -} - -type Body struct { - reader io.ReadCloser - compressedReader io.ReadCloser -} - -type Error struct { - timeout bool - Err error -} - -type transportRequestCanceler interface { - CancelRequest(*http.Request) -} - -func (e *Error) Timeout() bool { - return e.timeout -} - -func (e *Error) Error() string { - return e.Err.Error() -} - -func (b *Body) Read(p []byte) (int, error) { - if b.compressedReader != nil { - return b.compressedReader.Read(p) - } - return b.reader.Read(p) -} - -func (b *Body) Close() error { - err := b.reader.Close() - if b.compressedReader != nil { - return b.compressedReader.Close() - } - return err -} - -func (b *Body) FromJsonTo(o interface{}) error { - return json.NewDecoder(b).Decode(o) -} - -func (b *Body) ToString() (string, error) { - body, err := ioutil.ReadAll(b) - if err != nil { - return "", err - } - return string(body), nil -} - -func Gzip() *compression { - reader := func(buffer io.Reader) (io.ReadCloser, error) { - return gzip.NewReader(buffer) - } - writer := func(buffer io.Writer) (io.WriteCloser, error) { - return gzip.NewWriter(buffer), nil - } - return &compression{writer: writer, reader: reader, ContentEncoding: "gzip"} -} - -func Deflate() *compression { - reader := func(buffer io.Reader) (io.ReadCloser, error) { - return zlib.NewReader(buffer) - } - writer := func(buffer io.Writer) (io.WriteCloser, error) { - return zlib.NewWriter(buffer), nil - } - return &compression{writer: writer, reader: reader, ContentEncoding: "deflate"} -} - -func Zlib() *compression { - return Deflate() -} - -func paramParse(query interface{}) (string, error) { - switch query.(type) { - case url.Values: - return query.(url.Values).Encode(), nil - case *url.Values: - return query.(*url.Values).Encode(), nil - default: - var v = &url.Values{} - err := paramParseStruct(v, query) - return v.Encode(), err - } -} - -func paramParseStruct(v *url.Values, query interface{}) 
error { - var ( - s = reflect.ValueOf(query) - t = reflect.TypeOf(query) - ) - for t.Kind() == reflect.Ptr || t.Kind() == reflect.Interface { - s = s.Elem() - t = s.Type() - } - - if t.Kind() != reflect.Struct { - return errors.New("Can not parse QueryString.") - } - - for i := 0; i < t.NumField(); i++ { - var name string - - field := s.Field(i) - typeField := t.Field(i) - - if !field.CanInterface() { - continue - } - - urlTag := typeField.Tag.Get("url") - if urlTag == "-" { - continue - } - - name, opts := parseTag(urlTag) - - var omitEmpty, squash bool - omitEmpty = opts.Contains("omitempty") - squash = opts.Contains("squash") - - if squash { - err := paramParseStruct(v, field.Interface()) - if err != nil { - return err - } - continue - } - - if urlTag == "" { - name = strings.ToLower(typeField.Name) - } - - if val := fmt.Sprintf("%v", field.Interface()); !(omitEmpty && len(val) == 0) { - v.Add(name, val) - } - } - return nil -} - -func prepareRequestBody(b interface{}) (io.Reader, error) { - switch b.(type) { - case string: - // treat is as text - return strings.NewReader(b.(string)), nil - case io.Reader: - // treat is as text - return b.(io.Reader), nil - case []byte: - //treat as byte array - return bytes.NewReader(b.([]byte)), nil - case nil: - return nil, nil - default: - // try to jsonify it - j, err := json.Marshal(b) - if err == nil { - return bytes.NewReader(j), nil - } - return nil, err - } -} - -var DefaultDialer = &net.Dialer{Timeout: 1000 * time.Millisecond} -var DefaultTransport http.RoundTripper = &http.Transport{Dial: DefaultDialer.Dial, Proxy: http.ProxyFromEnvironment} -var DefaultClient = &http.Client{Transport: DefaultTransport} - -var proxyTransport http.RoundTripper -var proxyClient *http.Client - -func SetConnectTimeout(duration time.Duration) { - DefaultDialer.Timeout = duration -} - -func (r *Request) AddHeader(name string, value string) { - if r.headers == nil { - r.headers = []headerTuple{} - } - r.headers = append(r.headers, headerTuple{name: name, value: value}) -} - -func (r Request) WithHeader(name string, value string) Request { - r.AddHeader(name, value) - return r -} - -func (r *Request) AddCookie(c *http.Cookie) { - r.cookies = append(r.cookies, c) -} - -func (r Request) WithCookie(c *http.Cookie) Request { - r.AddCookie(c) - return r -} - -func (r *Request) AddProxyConnectHeader(name string, value string) { - if r.proxyConnectHeaders == nil { - r.proxyConnectHeaders = []headerTuple{} - } - r.proxyConnectHeaders = append(r.proxyConnectHeaders, headerTuple{name: name, value: value}) -} - -func (r Request) WithProxyConnectHeader(name string, value string) Request { - r.AddProxyConnectHeader(name, value) - return r -} - -func (r Request) Do() (*Response, error) { - var client = DefaultClient - var transport = DefaultTransport - var resUri string - var redirectFailed bool - - r.Method = valueOrDefault(r.Method, "GET") - - // use a client with a cookie jar if necessary. We create a new client not - // to modify the default one. 
- if r.CookieJar != nil { - client = &http.Client{ - Transport: transport, - Jar: r.CookieJar, - } - } - - if r.Proxy != "" { - proxyUrl, err := url.Parse(r.Proxy) - if err != nil { - // proxy address is in a wrong format - return nil, &Error{Err: err} - } - - proxyHeader := make(http.Header) - if r.proxyConnectHeaders != nil { - for _, header := range r.proxyConnectHeaders { - proxyHeader.Add(header.name, header.value) - } - } - - //If jar is specified new client needs to be built - if proxyTransport == nil || client.Jar != nil { - proxyTransport = &http.Transport{ - Dial: DefaultDialer.Dial, - Proxy: http.ProxyURL(proxyUrl), - ProxyConnectHeader: proxyHeader, - } - proxyClient = &http.Client{Transport: proxyTransport, Jar: client.Jar} - } else if proxyTransport, ok := proxyTransport.(*http.Transport); ok { - proxyTransport.Proxy = http.ProxyURL(proxyUrl) - proxyTransport.ProxyConnectHeader = proxyHeader - } - transport = proxyTransport - client = proxyClient - } - - client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - - if len(via) > r.MaxRedirects { - redirectFailed = true - return errors.New("Error redirecting. MaxRedirects reached") - } - - resUri = req.URL.String() - - //By default Golang will not redirect request headers - // https://code.google.com/p/go/issues/detail?id=4800&q=request%20header - if r.RedirectHeaders { - for key, val := range via[0].Header { - req.Header[key] = val - } - } - return nil - } - - if transport, ok := transport.(*http.Transport); ok { - if r.Insecure { - if transport.TLSClientConfig != nil { - transport.TLSClientConfig.InsecureSkipVerify = true - } else { - transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} - } - } else if transport.TLSClientConfig != nil { - // the default TLS client (when transport.TLSClientConfig==nil) is - // already set to verify, so do nothing in that case - transport.TLSClientConfig.InsecureSkipVerify = false - } - } - - req, err := r.NewRequest() - - if err != nil { - // we couldn't parse the URL. 
- return nil, &Error{Err: err} - } - - timeout := false - if r.Timeout > 0 { - client.Timeout = r.Timeout - } - - if r.ShowDebug { - dump, err := httputil.DumpRequest(req, true) - if err != nil { - log.Println(err) - } - log.Println(string(dump)) - } - - if r.OnBeforeRequest != nil { - r.OnBeforeRequest(&r, req) - } - res, err := client.Do(req) - - if err != nil { - if !timeout { - if t, ok := err.(itimeout); ok { - timeout = t.Timeout() - } - if ue, ok := err.(*url.Error); ok { - if t, ok := ue.Err.(itimeout); ok { - timeout = t.Timeout() - } - } - } - - var response *Response - //If redirect fails we still want to return response data - if redirectFailed { - if res != nil { - response = &Response{res, resUri, &Body{reader: res.Body}, req} - } else { - response = &Response{res, resUri, nil, req} - } - } - - //If redirect fails and we haven't set a redirect count we shouldn't return an error - if redirectFailed && r.MaxRedirects == 0 { - return response, nil - } - - return response, &Error{timeout: timeout, Err: err} - } - - if r.Compression != nil && strings.Contains(res.Header.Get("Content-Encoding"), r.Compression.ContentEncoding) { - compressedReader, err := r.Compression.reader(res.Body) - if err != nil { - return nil, &Error{Err: err} - } - return &Response{res, resUri, &Body{reader: res.Body, compressedReader: compressedReader}, req}, nil - } - - return &Response{res, resUri, &Body{reader: res.Body}, req}, nil -} - -func (r Request) addHeaders(headersMap http.Header) { - if len(r.UserAgent) > 0 { - headersMap.Add("User-Agent", r.UserAgent) - } - if r.Accept != "" { - headersMap.Add("Accept", r.Accept) - } - if r.ContentType != "" { - headersMap.Add("Content-Type", r.ContentType) - } -} - -func (r Request) NewRequest() (*http.Request, error) { - - b, e := prepareRequestBody(r.Body) - if e != nil { - // there was a problem marshaling the body - return nil, &Error{Err: e} - } - - if r.QueryString != nil { - param, e := paramParse(r.QueryString) - if e != nil { - return nil, &Error{Err: e} - } - r.Uri = r.Uri + "?" + param - } - - var bodyReader io.Reader - if b != nil && r.Compression != nil { - buffer := bytes.NewBuffer([]byte{}) - readBuffer := bufio.NewReader(b) - writer, err := r.Compression.writer(buffer) - if err != nil { - return nil, &Error{Err: err} - } - _, e = readBuffer.WriteTo(writer) - writer.Close() - if e != nil { - return nil, &Error{Err: e} - } - bodyReader = buffer - } else { - bodyReader = b - } - - req, err := http.NewRequest(r.Method, r.Uri, bodyReader) - if err != nil { - return nil, err - } - // add headers to the request - req.Host = r.Host - - r.addHeaders(req.Header) - if r.Compression != nil { - req.Header.Add("Content-Encoding", r.Compression.ContentEncoding) - req.Header.Add("Accept-Encoding", r.Compression.ContentEncoding) - } - if r.headers != nil { - for _, header := range r.headers { - req.Header.Add(header.name, header.value) - } - } - - //use basic auth if required - if r.BasicAuthUsername != "" { - req.SetBasicAuth(r.BasicAuthUsername, r.BasicAuthPassword) - } - - for _, c := range r.cookies { - req.AddCookie(c) - } - return req, nil -} - -// Return value if nonempty, def otherwise. -func valueOrDefault(value, def string) string { - if value != "" { - return value - } - return def -} diff --git a/vendor/github.com/franela/goreq/tags.go b/vendor/github.com/franela/goreq/tags.go deleted file mode 100644 index f3c147e9a4b..00000000000 --- a/vendor/github.com/franela/goreq/tags.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2011 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found here: https://github.com/golang/go/blob/master/LICENSE - -package goreq - -import ( - "strings" - "unicode" -) - -// tagOptions is the string following a comma in a struct field's "json" -// tag, or the empty string. It does not include the leading comma. -type tagOptions string - -// parseTag splits a struct field's json tag into its name and -// comma-separated options. -func parseTag(tag string) (string, tagOptions) { - if idx := strings.Index(tag, ","); idx != -1 { - return tag[:idx], tagOptions(tag[idx+1:]) - } - return tag, tagOptions("") -} - -// Contains reports whether a comma-separated list of options -// contains a particular substr flag. substr must be surrounded by a -// string boundary or commas. -func (o tagOptions) Contains(optionName string) bool { - if len(o) == 0 { - return false - } - s := string(o) - for s != "" { - var next string - i := strings.Index(s, ",") - if i >= 0 { - s, next = s[:i], s[i+1:] - } - if s == optionName { - return true - } - s = next - } - return false -} - -func isValidTag(s string) bool { - if s == "" { - return false - } - for _, c := range s { - switch { - case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): - // Backslash and quote chars are reserved, but - // otherwise any punctuation chars are allowed - // in a tag name. - default: - if !unicode.IsLetter(c) && !unicode.IsDigit(c) { - return false - } - } - } - return true -} diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/.travis.yml b/vendor/github.com/gemnasium/logrus-graylog-hook/.travis.yml deleted file mode 100644 index 9fe30e3cb9a..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: go -go: - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - 1.8 - - 1.9 - - tip -install: - - mkdir -p $HOME/gopath/src/gopkg.in/gemnasium - - mv $HOME/gopath/src/github.com/gemnasium/logrus-graylog-hook $HOME/gopath/src/gopkg.in/gemnasium/logrus-graylog-hook.v2 - - cd $HOME/gopath/src/gopkg.in/gemnasium/logrus-graylog-hook.v2 - - go get -t diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/CHANGELOG.md b/vendor/github.com/gemnasium/logrus-graylog-hook/CHANGELOG.md deleted file mode 100644 index fbe770db9c7..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/CHANGELOG.md +++ /dev/null @@ -1,52 +0,0 @@ -# Logrus Graylog hook - -## 2.0.7 - 2018-02-09 - -* Fix reported levels to match syslog levels (@maxatome / #27) -* Removed go 1.3 support - -## 2.0.6 - 2017-06-01 - -* Update import logrus path. 
See https://github.com/sirupsen/logrus/pull/384 - -## 2.0.5 - 2017-04-14 - -* Support uncompressed messages (@yuancheng-p / #24) - -## 2.0.4 - 2017-02-19 - -* Avoid panic if the hook can't dial Graylog (@chiffa-org / #21) - -## 2.0.3 - 2016-11-30 - -* Add support for extracting stacktraces from errors (@flimzy / #19) -* Allow specifying the host instead of taking `os.Hostname` by default (@mweibel / #18) - -## 2.0.2 - 2016-09-28 - -* Get rid of github.com/SocialCodeInc/go-gelf/gelf (#14) - -## 2.0.1 - 2016-08-16 - -* Fix an issue with entry constructor (#12) - -## 2.0.0 - 2016-07-02 - -* Remove facility param in constructor, as it's an optional param in Graylog 2.0 (credits: @saward / #9) -* Improve precision of TimeUnix (credits: @RaphYot / #2) -* Expose Gelf Writer (we will make this an interface in later versions) (credits: @cha-won / #10) - -## 1.1.2 - 2016-06-03 - -* Fix another race condition (credits: @dreyinger / #8) - -## 1.1.1 - 2016-05-10 - -* Fix race condition (credits: @rschmukler / #6) - -## 1.1.0 - 2015-12-04 - -* The default behavior is now to send the logs synchronously. -* A new asynchronous hook is available through `NewAsyncGraylogHook` - - diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/LICENSE b/vendor/github.com/gemnasium/logrus-graylog-hook/LICENSE deleted file mode 100644 index a4282b2a1f2..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Gemnasium - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/README.md b/vendor/github.com/gemnasium/logrus-graylog-hook/README.md deleted file mode 100644 index 944cfd76623..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Graylog Hook for [Logrus](https://github.com/sirupsen/logrus) :walrus: [![Build Status](https://travis-ci.org/gemnasium/logrus-graylog-hook.svg?branch=master)](https://travis-ci.org/gemnasium/logrus-graylog-hook) [![godoc reference](https://godoc.org/github.com/gemnasium/logrus-graylog-hook?status.svg)](https://godoc.org/gopkg.in/gemnasium/logrus-graylog-hook.v2) - -Use this hook to send your logs to [Graylog](http://graylog2.org) server over UDP. -The hook is non-blocking: even if UDP is used to send messages, the extra work -should not block the logging function. - -All logrus fields will be sent as additional fields on Graylog. 
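That sentence is implemented in `graylog_hook.go` (deleted further down): both the hook-level `Extra` map and the per-entry logrus fields are copied into the GELF message with an underscore prefix, so they surface in Graylog as additional fields. A minimal sketch of that behaviour follows, assuming the `gopkg.in/gemnasium/logrus-graylog-hook.v2` import path used in this README; the address and field values are illustrative only.

```go
package main

import (
	log "github.com/sirupsen/logrus"
	graylog "gopkg.in/gemnasium/logrus-graylog-hook.v2"
)

func main() {
	// Illustrative GELF UDP address; the second argument is a set of global
	// extra fields attached to every message sent by the hook.
	hook := graylog.NewGraylogHook("127.0.0.1:12201", map[string]interface{}{
		"service": "example-service",
	})
	log.AddHook(hook)

	// Per-entry fields ride along as well; on the Graylog side they appear
	// as the additional fields _service, _component and _request_id.
	log.WithFields(log.Fields{
		"component":  "proxy",
		"request_id": "abc123",
	}).Info("request handled")
}
```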
- -## Usage - -The hook must be configured with: - -* A Graylog GELF UDP address (a "ip:port" string). -* an optional hash with extra global fields. These fields will be included in all messages sent to Graylog - -```go -package main - -import ( - "log/syslog" - log "github.com/sirupsen/logrus" - "gopkg.in/gemnasium/logrus-graylog-hook.v2" - ) - -func main() { - hook := graylog.NewGraylogHook(":", map[string]interface{}{"this": "is logged every time"}) - log.AddHook(hook) - log.Info("some logging message") -} -``` - -### Asynchronous logger - -```go -package main - -import ( - "log/syslog" - log "github.com/sirupsen/logrus" - "gopkg.in/gemnasium/logrus-graylog-hook.v2" - ) - -func main() { - hook := graylog.NewAsyncGraylogHook(":", map[string]interface{}{"this": "is logged every time"}) - defer hook.Flush() - log.AddHook(hook) - log.Info("some logging message") -} -``` - -### Disable standard logging - -For some reason, you may want to disable logging on stdout, and keep only the messages in Graylog (ie: a webserver inside a docker container). -You can redirect `stdout` to `/dev/null`, or just not log anything by creating a `NullFormatter` implementing `logrus.Formatter` interface: - -```go -type NullFormatter struct { -} - -// Don't spend time formatting logs -func (NullFormatter) Format(e *log.Entry) ([]byte, error) { - return []byte{}, nil -} -``` - -And set this formatter as the new logging formatter: - -```go -log.Infof("Log messages are now sent to Graylog (udp://%s)", graylogAddr) // Give a hint why logs are empty -log.AddHook(graylog.NewGraylogHook(graylogAddr, "api", map[string]interface{}{})) // set graylogAddr accordingly -log.SetFormatter(new(NullFormatter)) // Don't send logs to stdout -``` diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/error.go b/vendor/github.com/gemnasium/logrus-graylog-hook/error.go deleted file mode 100644 index 5d28124cd5b..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/error.go +++ /dev/null @@ -1,55 +0,0 @@ -package graylog - -import ( - "encoding/json" - "runtime" - - "github.com/pkg/errors" -) - -// newMarshalableError builds an error which encodes its error message into JSON -func newMarshalableError(err error) *marshalableError { - return &marshalableError{err} -} - -// a marshalableError is an error that can be encoded into JSON -type marshalableError struct { - err error -} - -// MarshalJSON implements json.Marshaler for marshalableError -func (m *marshalableError) MarshalJSON() ([]byte, error) { - return json.Marshal(m.err.Error()) -} - -type causer interface { - Cause() error -} - -type stackTracer interface { - StackTrace() errors.StackTrace -} - -func extractStackTrace(err error) errors.StackTrace { - var tracer stackTracer - for { - if st, ok := err.(stackTracer); ok { - tracer = st - } - if cause, ok := err.(causer); ok { - err = cause.Cause() - continue - } - break - } - if tracer == nil { - return nil - } - return tracer.StackTrace() -} - -func extractFileAndLine(stacktrace errors.StackTrace) (string, int) { - pc := uintptr(stacktrace[0]) - fn := runtime.FuncForPC(pc) - return fn.FileLine(pc) -} diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_reader.go b/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_reader.go deleted file mode 100644 index 4b175f6911d..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_reader.go +++ /dev/null @@ -1,138 +0,0 @@ -package graylog - -import ( - "bytes" - "compress/gzip" - "compress/zlib" - "encoding/json" - "fmt" - "io" - "net" - 
"strings" - "sync" -) - -type Reader struct { - mu sync.Mutex - conn net.Conn -} - -func NewReader(addr string) (*Reader, error) { - var err error - udpAddr, err := net.ResolveUDPAddr("udp", addr) - if err != nil { - return nil, fmt.Errorf("ResolveUDPAddr('%s'): %s", addr, err) - } - - conn, err := net.ListenUDP("udp", udpAddr) - if err != nil { - return nil, fmt.Errorf("ListenUDP: %s", err) - } - - r := new(Reader) - r.conn = conn - return r, nil -} - -func (r *Reader) Addr() string { - return r.conn.LocalAddr().String() -} - -// FIXME: this will discard data if p isn't big enough to hold the -// full message. -func (r *Reader) Read(p []byte) (int, error) { - msg, err := r.ReadMessage() - if err != nil { - return -1, err - } - - var data string - - if msg.Full == "" { - data = msg.Short - } else { - data = msg.Full - } - - return strings.NewReader(data).Read(p) -} - -func (r *Reader) ReadMessage() (*Message, error) { - cBuf := make([]byte, ChunkSize) - var ( - err error - n, length int - buf bytes.Buffer - cid, ocid []byte - seq, total uint8 - cHead []byte - cReader io.Reader - chunks [][]byte - ) - - for got := 0; got < 128 && (total == 0 || got < int(total)); got++ { - if n, err = r.conn.Read(cBuf); err != nil { - return nil, fmt.Errorf("Read: %s", err) - } - cHead, cBuf = cBuf[:2], cBuf[:n] - - if bytes.Equal(cHead, magicChunked) { - //fmt.Printf("chunked %v\n", cBuf[:14]) - cid, seq, total = cBuf[2:2+8], cBuf[2+8], cBuf[2+8+1] - if ocid != nil && !bytes.Equal(cid, ocid) { - return nil, fmt.Errorf("out-of-band message %v (awaited %v)", cid, ocid) - } else if ocid == nil { - ocid = cid - chunks = make([][]byte, total) - } - n = len(cBuf) - chunkedHeaderLen - //fmt.Printf("setting chunks[%d]: %d\n", seq, n) - chunks[seq] = append(make([]byte, 0, n), cBuf[chunkedHeaderLen:]...) - length += n - } else { //not chunked - if total > 0 { - return nil, fmt.Errorf("out-of-band message (not chunked)") - } - break - } - } - //fmt.Printf("\nchunks: %v\n", chunks) - - if length > 0 { - if cap(cBuf) < length { - cBuf = append(cBuf, make([]byte, 0, length-cap(cBuf))...) - } - cBuf = cBuf[:0] - for i := range chunks { - //fmt.Printf("appending %d %v\n", i, chunks[i]) - cBuf = append(cBuf, chunks[i]...) - } - cHead = cBuf[:2] - } - - // the data we get from the wire is compressed - if bytes.Equal(cHead, magicGzip) { - cReader, err = gzip.NewReader(bytes.NewReader(cBuf)) - } else if cHead[0] == magicZlib[0] && - (int(cHead[0])*256+int(cHead[1]))%31 == 0 { - // zlib is slightly more complicated, but correct - cReader, err = zlib.NewReader(bytes.NewReader(cBuf)) - } else { - return nil, fmt.Errorf("unknown magic: %x %v", cHead, cHead) - } - - if err != nil { - return nil, fmt.Errorf("NewReader: %s", err) - } - - if _, err = io.Copy(&buf, cReader); err != nil { - return nil, fmt.Errorf("io.Copy: %s", err) - } - - msg := new(Message) - if err := json.Unmarshal(buf.Bytes(), &msg); err != nil { - return nil, fmt.Errorf("json.Unmarshal: %s", err) - } - - return msg, nil -} diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_writer.go b/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_writer.go deleted file mode 100644 index 3c3d3e21c53..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/gelf_writer.go +++ /dev/null @@ -1,339 +0,0 @@ -// Copyright 2012 SocialCode. All rights reserved. -// Use of this source code is governed by the MIT -// license that can be found in the LICENSE file. 
- -package graylog - -import ( - "bytes" - "compress/flate" - "compress/gzip" - "compress/zlib" - "crypto/rand" - "encoding/json" - "fmt" - "io" - "net" - "os" - "path" - "sync" - "time" -) - -// Writer implements io.Writer and is used to send both discrete -// messages to a graylog2 server, or data from a stream-oriented -// interface (like the functions in log). -type Writer struct { - mu sync.Mutex - conn net.Conn - hostname string - Facility string // defaults to current process name - CompressionLevel int // one of the consts from compress/flate - CompressionType CompressType -} - -// What compression type the writer should use when sending messages -// to the graylog2 server -type CompressType int - -const ( - CompressGzip CompressType = iota - CompressZlib - NoCompress -) - -// Message represents the contents of the GELF message. It is gzipped -// before sending. -type Message struct { - Version string `json:"version"` - Host string `json:"host"` - Short string `json:"short_message"` - Full string `json:"full_message"` - TimeUnix float64 `json:"timestamp"` - Level int32 `json:"level"` - Facility string `json:"facility"` - File string `json:"file"` - Line int `json:"line"` - Extra map[string]interface{} `json:"-"` -} - -type innerMessage Message //against circular (Un)MarshalJSON - -// Used to control GELF chunking. Should be less than (MTU - len(UDP -// header)). -// -// TODO: generate dynamically using Path MTU Discovery? -const ( - ChunkSize = 1420 - chunkedHeaderLen = 12 - chunkedDataLen = ChunkSize - chunkedHeaderLen -) - -var ( - magicChunked = []byte{0x1e, 0x0f} - magicZlib = []byte{0x78} - magicGzip = []byte{0x1f, 0x8b} -) - -// numChunks returns the number of GELF chunks necessary to transmit -// the given compressed buffer. -func numChunks(b []byte) int { - lenB := len(b) - if lenB <= ChunkSize { - return 1 - } - return len(b)/chunkedDataLen + 1 -} - -// New returns a new GELF Writer. This writer can be used to send the -// output of the standard Go log functions to a central GELF server by -// passing it to log.SetOutput() -func NewWriter(addr string) (*Writer, error) { - var err error - w := new(Writer) - w.CompressionLevel = flate.BestSpeed - - if w.conn, err = net.Dial("udp", addr); err != nil { - return nil, err - } - if w.hostname, err = os.Hostname(); err != nil { - return nil, err - } - - w.Facility = path.Base(os.Args[0]) - - return w, nil -} - -// writes the gzip compressed byte array to the connection as a series -// of GELF chunked messages. The header format is documented at -// https://github.com/Graylog2/graylog2-docs/wiki/GELF as: -// -// 2-byte magic (0x1e 0x0f), 8 byte id, 1 byte sequence id, 1 byte -// total, chunk-data -func (w *Writer) writeChunked(zBytes []byte) (err error) { - b := make([]byte, 0, ChunkSize) - buf := bytes.NewBuffer(b) - nChunksI := numChunks(zBytes) - if nChunksI > 255 { - return fmt.Errorf("msg too large, would need %d chunks", nChunksI) - } - nChunks := uint8(nChunksI) - // use urandom to get a unique message id - msgId := make([]byte, 8) - n, err := io.ReadFull(rand.Reader, msgId) - if err != nil || n != 8 { - return fmt.Errorf("rand.Reader: %d/%s", n, err) - } - - bytesLeft := len(zBytes) - for i := uint8(0); i < nChunks; i++ { - buf.Reset() - // manually write header. Don't care about - // host/network byte order, because the spec only - // deals in individual bytes. 
- buf.Write(magicChunked) //magic - buf.Write(msgId) - buf.WriteByte(i) - buf.WriteByte(nChunks) - // slice out our chunk from zBytes - chunkLen := chunkedDataLen - if chunkLen > bytesLeft { - chunkLen = bytesLeft - } - off := int(i) * chunkedDataLen - chunk := zBytes[off : off+chunkLen] - buf.Write(chunk) - - // write this chunk, and make sure the write was good - n, err := w.conn.Write(buf.Bytes()) - if err != nil { - return fmt.Errorf("Write (chunk %d/%d): %s", i, - nChunks, err) - } - if n != len(buf.Bytes()) { - return fmt.Errorf("Write len: (chunk %d/%d) (%d/%d)", - i, nChunks, n, len(buf.Bytes())) - } - - bytesLeft -= chunkLen - } - - if bytesLeft != 0 { - return fmt.Errorf("error: %d bytes left after sending", bytesLeft) - } - return nil -} - -type bufferedWriter struct { - buffer *bytes.Buffer -} - -func (bw bufferedWriter) Write(p []byte) (n int, err error) { - return bw.buffer.Write(p) -} - -func (bw bufferedWriter) Close() error { - return nil -} - -// WriteMessage sends the specified message to the GELF server -// specified in the call to New(). It assumes all the fields are -// filled out appropriately. In general, clients will want to use -// Write, rather than WriteMessage. -func (w *Writer) WriteMessage(m *Message) (err error) { - mBytes, err := json.Marshal(m) - if err != nil { - return - } - - var zBuf bytes.Buffer - var zw io.WriteCloser - switch w.CompressionType { - case CompressGzip: - zw, err = gzip.NewWriterLevel(&zBuf, w.CompressionLevel) - case CompressZlib: - zw, err = zlib.NewWriterLevel(&zBuf, w.CompressionLevel) - case NoCompress: - zw = bufferedWriter{buffer: &zBuf} - default: - panic(fmt.Sprintf("unknown compression type %d", - w.CompressionType)) - } - if err != nil { - return - } - if _, err = zw.Write(mBytes); err != nil { - return - } - zw.Close() - - zBytes := zBuf.Bytes() - if numChunks(zBytes) > 1 { - return w.writeChunked(zBytes) - } - - n, err := w.conn.Write(zBytes) - if err != nil { - return - } - if n != len(zBytes) { - return fmt.Errorf("bad write (%d/%d)", n, len(zBytes)) - } - - return nil -} - -/* -func (w *Writer) Alert(m string) (err error) -func (w *Writer) Close() error -func (w *Writer) Crit(m string) (err error) -func (w *Writer) Debug(m string) (err error) -func (w *Writer) Emerg(m string) (err error) -func (w *Writer) Err(m string) (err error) -func (w *Writer) Info(m string) (err error) -func (w *Writer) Notice(m string) (err error) -func (w *Writer) Warning(m string) (err error) -*/ - -// Write encodes the given string in a GELF message and sends it to -// the server specified in New(). -func (w *Writer) Write(p []byte) (n int, err error) { - - // 1 for the function that called us. - file, line := getCallerIgnoringLogMulti(1) - - // remove trailing and leading whitespace - p = bytes.TrimSpace(p) - - // If there are newlines in the message, use the first line - // for the short message and set the full message to the - // original input. If the input has no newlines, stick the - // whole thing in Short. 
- short := p - full := []byte("") - if i := bytes.IndexRune(p, '\n'); i > 0 { - short = p[:i] - full = p - } - - m := Message{ - Version: "1.0", - Host: w.hostname, - Short: string(short), - Full: string(full), - TimeUnix: float64(time.Now().UnixNano()/1000000) / 1000., - Level: 6, // info - Facility: w.Facility, - File: file, - Line: line, - Extra: map[string]interface{}{}, - } - - if err = w.WriteMessage(&m); err != nil { - return 0, err - } - - return len(p), nil -} - -func (m *Message) MarshalJSON() ([]byte, error) { - var err error - var b, eb []byte - - extra := m.Extra - b, err = json.Marshal((*innerMessage)(m)) - m.Extra = extra - if err != nil { - return nil, err - } - - if len(extra) == 0 { - return b, nil - } - - if eb, err = json.Marshal(extra); err != nil { - return nil, err - } - - // merge serialized message + serialized extra map - b[len(b)-1] = ',' - return append(b, eb[1:len(eb)]...), nil -} - -func (m *Message) UnmarshalJSON(data []byte) error { - i := make(map[string]interface{}, 16) - if err := json.Unmarshal(data, &i); err != nil { - return err - } - for k, v := range i { - if k[0] == '_' { - if m.Extra == nil { - m.Extra = make(map[string]interface{}, 1) - } - m.Extra[k] = v - continue - } - switch k { - case "version": - m.Version = v.(string) - case "host": - m.Host = v.(string) - case "short_message": - m.Short = v.(string) - case "full_message": - m.Full = v.(string) - case "timestamp": - m.TimeUnix = v.(float64) - case "level": - m.Level = int32(v.(float64)) - case "facility": - m.Facility = v.(string) - case "file": - m.File = v.(string) - case "line": - m.Line = int(v.(float64)) - } - } - return nil -} diff --git a/vendor/github.com/gemnasium/logrus-graylog-hook/graylog_hook.go b/vendor/github.com/gemnasium/logrus-graylog-hook/graylog_hook.go deleted file mode 100644 index 8dcda5d5161..00000000000 --- a/vendor/github.com/gemnasium/logrus-graylog-hook/graylog_hook.go +++ /dev/null @@ -1,295 +0,0 @@ -package graylog - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "os" - "runtime" - "strings" - "sync" - "time" - - "github.com/sirupsen/logrus" -) - -const StackTraceKey = "_stacktrace" - -// Set graylog.BufSize = _before_ calling NewGraylogHook -// Once the buffer is full, logging will start blocking, waiting for slots to -// be available in the queue. -var BufSize uint = 8192 - -// GraylogHook to send logs to a logging service compatible with the Graylog API and the GELF format. -type GraylogHook struct { - Extra map[string]interface{} - Host string - Level logrus.Level - gelfLogger *Writer - buf chan graylogEntry - wg sync.WaitGroup - mu sync.RWMutex - synchronous bool - blacklist map[string]bool -} - -// Graylog needs file and line params -type graylogEntry struct { - *logrus.Entry - file string - line int -} - -// NewGraylogHook creates a hook to be added to an instance of logger. -func NewGraylogHook(addr string, extra map[string]interface{}) *GraylogHook { - g, err := NewWriter(addr) - if err != nil { - logrus.WithError(err).Error("Can't create Gelf logger") - } - - host, err := os.Hostname() - if err != nil { - host = "localhost" - } - - hook := &GraylogHook{ - Host: host, - Extra: extra, - Level: logrus.DebugLevel, - gelfLogger: g, - synchronous: true, - } - return hook -} - -// NewAsyncGraylogHook creates a hook to be added to an instance of logger. -// The hook created will be asynchronous, and it's the responsibility of the user to call the Flush method -// before exiting to empty the log queue. 
-func NewAsyncGraylogHook(addr string, extra map[string]interface{}) *GraylogHook { - g, err := NewWriter(addr) - if err != nil { - logrus.WithError(err).Error("Can't create Gelf logger") - } - - host, err := os.Hostname() - if err != nil { - host = "localhost" - } - - hook := &GraylogHook{ - Host: host, - Extra: extra, - Level: logrus.DebugLevel, - gelfLogger: g, - buf: make(chan graylogEntry, BufSize), - } - go hook.fire() // Log in background - return hook -} - -// Fire is called when a log event is fired. -// We assume the entry will be altered by another hook, -// otherwise we might logging something wrong to Graylog -func (hook *GraylogHook) Fire(entry *logrus.Entry) error { - hook.mu.RLock() // Claim the mutex as a RLock - allowing multiple go routines to log simultaneously - defer hook.mu.RUnlock() - - // get caller file and line here, it won't be available inside the goroutine - // 1 for the function that called us. - file, line := getCallerIgnoringLogMulti(1) - - newData := make(map[string]interface{}) - for k, v := range entry.Data { - newData[k] = v - } - - newEntry := &logrus.Entry{ - Logger: entry.Logger, - Data: newData, - Time: entry.Time, - Level: entry.Level, - Message: entry.Message, - } - gEntry := graylogEntry{newEntry, file, line} - - if hook.synchronous { - hook.sendEntry(gEntry) - } else { - hook.wg.Add(1) - hook.buf <- gEntry - } - - return nil -} - -// Flush waits for the log queue to be empty. -// This func is meant to be used when the hook was created with NewAsyncGraylogHook. -func (hook *GraylogHook) Flush() { - hook.mu.Lock() // claim the mutex as a Lock - we want exclusive access to it - defer hook.mu.Unlock() - - hook.wg.Wait() -} - -// fire will loop on the 'buf' channel, and write entries to graylog -func (hook *GraylogHook) fire() { - for { - entry := <-hook.buf // receive new entry on channel - hook.sendEntry(entry) - hook.wg.Done() - } -} - -func logrusLevelToSylog(level logrus.Level) int32 { - // Till warn, logrus levels are lower than syslog by 1 - // (logrus has no equivalent of syslog LOG_NOTICE) - if level <= logrus.WarnLevel { - return int32(level) + 1 - } - // From info, logrus levels are lower than syslog by 2 - return int32(level) + 2 -} - -// sendEntry sends an entry to graylog synchronously -func (hook *GraylogHook) sendEntry(entry graylogEntry) { - if hook.gelfLogger == nil { - fmt.Println("Can't connect to Graylog") - return - } - w := hook.gelfLogger - - // remove trailing and leading whitespace - p := bytes.TrimSpace([]byte(entry.Message)) - - // If there are newlines in the message, use the first line - // for the short message and set the full message to the - // original input. If the input has no newlines, stick the - // whole thing in Short. - short := p - full := []byte("") - if i := bytes.IndexRune(p, '\n'); i > 0 { - short = p[:i] - full = p - } - - level := logrusLevelToSylog(entry.Level) - - // Don't modify entry.Data directly, as the entry will used after this hook was fired - extra := map[string]interface{}{} - // Merge extra fields - for k, v := range hook.Extra { - k = fmt.Sprintf("_%s", k) // "[...] every field you send and prefix with a _ (underscore) will be treated as an additional field." - extra[k] = v - } - for k, v := range entry.Data { - if !hook.blacklist[k] { - extraK := fmt.Sprintf("_%s", k) // "[...] every field you send and prefix with a _ (underscore) will be treated as an additional field." 
- if k == logrus.ErrorKey { - asError, isError := v.(error) - _, isMarshaler := v.(json.Marshaler) - if isError && !isMarshaler { - extra[extraK] = newMarshalableError(asError) - } else { - extra[extraK] = v - } - if stackTrace := extractStackTrace(asError); stackTrace != nil { - extra[StackTraceKey] = fmt.Sprintf("%+v", stackTrace) - file, line := extractFileAndLine(stackTrace) - if file != "" && line != 0 { - entry.file = file - entry.line = line - } - } - } else { - extra[extraK] = v - } - } - } - - m := Message{ - Version: "1.1", - Host: hook.Host, - Short: string(short), - Full: string(full), - TimeUnix: float64(time.Now().UnixNano()/1000000) / 1000., - Level: level, - File: entry.file, - Line: entry.line, - Extra: extra, - } - - if err := w.WriteMessage(&m); err != nil { - fmt.Println(err) - } -} - -// Levels returns the available logging levels. -func (hook *GraylogHook) Levels() []logrus.Level { - levels := []logrus.Level{} - for _, level := range logrus.AllLevels { - if level <= hook.Level { - levels = append(levels, level) - } - } - return levels -} - -// Blacklist create a blacklist map to filter some message keys. -// This useful when you want your application to log extra fields locally -// but don't want graylog to store them. -func (hook *GraylogHook) Blacklist(b []string) { - hook.blacklist = make(map[string]bool) - for _, elem := range b { - hook.blacklist[elem] = true - } -} - -// SetWriter sets the hook Gelf Writer -func (hook *GraylogHook) SetWriter(w *Writer) error { - if w == nil { - return errors.New("writer can't be nil") - } - hook.gelfLogger = w - return nil -} - -// Writer returns the logger Gelf Writer -func (hook *GraylogHook) Writer() *Writer { - return hook.gelfLogger -} - -// getCaller returns the filename and the line info of a function -// further down in the call stack. Passing 0 in as callDepth would -// return info on the function calling getCallerIgnoringLog, 1 the -// parent function, and so on. Any suffixes passed to getCaller are -// path fragments like "/pkg/log/log.go", and functions in the call -// stack from that file are ignored. -func getCaller(callDepth int, suffixesToIgnore ...string) (file string, line int) { - // bump by 1 to ignore the getCaller (this) stackframe - callDepth++ -outer: - for { - var ok bool - _, file, line, ok = runtime.Caller(callDepth) - if !ok { - file = "???" 
- line = 0 - break - } - - for _, s := range suffixesToIgnore { - if strings.HasSuffix(file, s) { - callDepth++ - continue outer - } - } - break - } - return -} - -func getCallerIgnoringLogMulti(callDepth int) (string, int) { - // the +1 is to ignore this (getCallerIgnoringLogMulti) frame - return getCaller(callDepth+1, "logrus/hooks.go", "logrus/entry.go", "logrus/logger.go", "logrus/exported.go", "asm_amd64.s") -} diff --git a/vendor/github.com/getsentry/raven-go/.dockerignore b/vendor/github.com/getsentry/raven-go/.dockerignore deleted file mode 100644 index 6b8710a711f..00000000000 --- a/vendor/github.com/getsentry/raven-go/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -.git diff --git a/vendor/github.com/getsentry/raven-go/.gitignore b/vendor/github.com/getsentry/raven-go/.gitignore deleted file mode 100644 index 0f66ce75dc9..00000000000 --- a/vendor/github.com/getsentry/raven-go/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -*.test -*.out -example/example -/xunit.xml -/coverage.xml diff --git a/vendor/github.com/getsentry/raven-go/.gitmodules b/vendor/github.com/getsentry/raven-go/.gitmodules deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/vendor/github.com/getsentry/raven-go/.travis.yml b/vendor/github.com/getsentry/raven-go/.travis.yml deleted file mode 100644 index 8ec4eca8109..00000000000 --- a/vendor/github.com/getsentry/raven-go/.travis.yml +++ /dev/null @@ -1,41 +0,0 @@ -sudo: false -language: go -go: - - 1.7.x - - 1.8.x - - 1.9.x - - 1.10.x - - 1.11.x - - tip - -before_install: - - go install -race std - - go get golang.org/x/tools/cmd/cover - - go get github.com/tebeka/go2xunit - - go get github.com/t-yuki/gocover-cobertura - - go get -v ./... - -script: - - go test -v -race ./... | tee gotest.out - - $GOPATH/bin/go2xunit -fail -input gotest.out -output xunit.xml - - go test -v -coverprofile=coverage.txt -covermode count . - - $GOPATH/bin/gocover-cobertura < coverage.txt > coverage.xml - -after_script: - - npm install -g @zeus-ci/cli - - zeus upload -t "application/x-cobertura+xml" coverage.xml - - zeus upload -t "application/x-xunit+xml" xunit.xml - -matrix: - allow_failures: - - go: tip - -notifications: - webhooks: - urls: - - https://zeus.ci/hooks/cd949996-d30a-11e8-ba53-0a580a28042d/public/provider/travis/webhook - on_success: always - on_failure: always - on_start: always - on_cancel: always - on_error: always diff --git a/vendor/github.com/getsentry/raven-go/LICENSE b/vendor/github.com/getsentry/raven-go/LICENSE deleted file mode 100644 index b0301b57e8a..00000000000 --- a/vendor/github.com/getsentry/raven-go/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2013 Apollic Software, LLC. All rights reserved. -Copyright (c) 2015 Functional Software, Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Apollic Software, LLC nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/getsentry/raven-go/README.md b/vendor/github.com/getsentry/raven-go/README.md deleted file mode 100644 index 16c9483e8a2..00000000000 --- a/vendor/github.com/getsentry/raven-go/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# raven - -[![Build Status](https://api.travis-ci.org/getsentry/raven-go.svg?branch=master)](https://travis-ci.org/getsentry/raven-go) -[![Go Report Card](https://goreportcard.com/badge/github.com/getsentry/raven-go)](https://goreportcard.com/report/github.com/getsentry/raven-go) -[![GoDoc](https://godoc.org/github.com/getsentry/raven-go?status.svg)](https://godoc.org/github.com/getsentry/raven-go) - -raven is the official Go SDK for the [Sentry](https://github.com/getsentry/sentry) -event/error logging system. - -- [**API Documentation**](https://godoc.org/github.com/getsentry/raven-go) -- [**Usage and Examples**](https://docs.sentry.io/clients/go/) - -## Installation - -```text -go get github.com/getsentry/raven-go -``` - -Note: Go 1.7 and newer are supported. diff --git a/vendor/github.com/getsentry/raven-go/client.go b/vendor/github.com/getsentry/raven-go/client.go deleted file mode 100644 index a2c9a6c35d2..00000000000 --- a/vendor/github.com/getsentry/raven-go/client.go +++ /dev/null @@ -1,977 +0,0 @@ -// Package raven implements a client for the Sentry error logging service. 
-package raven - -import ( - "bytes" - "compress/zlib" - "crypto/rand" - "crypto/tls" - "encoding/base64" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "io" - "io/ioutil" - "log" - mrand "math/rand" - "net/http" - "net/url" - "os" - "regexp" - "runtime" - "strings" - "sync" - "time" - - "github.com/certifi/gocertifi" - pkgErrors "github.com/pkg/errors" -) - -const ( - userAgent = "raven-go/1.0" - timestampFormat = `"2006-01-02T15:04:05.00"` -) - -var ( - ErrPacketDropped = errors.New("raven: packet dropped") - ErrUnableToUnmarshalJSON = errors.New("raven: unable to unmarshal JSON") - ErrMissingUser = errors.New("raven: dsn missing public key and/or password") - ErrMissingProjectID = errors.New("raven: dsn missing project id") - ErrInvalidSampleRate = errors.New("raven: sample rate should be between 0 and 1") -) - -type Severity string - -// http://docs.python.org/2/howto/logging.html#logging-levels -const ( - DEBUG = Severity("debug") - INFO = Severity("info") - WARNING = Severity("warning") - ERROR = Severity("error") - FATAL = Severity("fatal") -) - -type Timestamp time.Time - -func (t Timestamp) MarshalJSON() ([]byte, error) { - return []byte(time.Time(t).UTC().Format(timestampFormat)), nil -} - -func (timestamp *Timestamp) UnmarshalJSON(data []byte) error { - t, err := time.Parse(timestampFormat, string(data)) - if err != nil { - return err - } - - *timestamp = Timestamp(t) - return nil -} - -func (timestamp Timestamp) Format(format string) string { - t := time.Time(timestamp) - return t.Format(format) -} - -// An Interface is a Sentry interface that will be serialized as JSON. -// It must implement json.Marshaler or use json struct tags. -type Interface interface { - // The Sentry class name. Example: sentry.interfaces.Stacktrace - Class() string -} - -type Culpriter interface { - Culprit() string -} - -type Transport interface { - Send(url, authHeader string, packet *Packet) error -} - -type Extra map[string]interface{} - -type outgoingPacket struct { - packet *Packet - ch chan error -} - -type Tag struct { - Key string - Value string -} - -type Tags []Tag - -func (tag *Tag) MarshalJSON() ([]byte, error) { - return json.Marshal([2]string{tag.Key, tag.Value}) -} - -func (t *Tag) UnmarshalJSON(data []byte) error { - var tag [2]string - if err := json.Unmarshal(data, &tag); err != nil { - return err - } - *t = Tag{tag[0], tag[1]} - return nil -} - -func (t *Tags) UnmarshalJSON(data []byte) error { - var tags []Tag - - switch data[0] { - case '[': - // Unmarshal into []Tag - if err := json.Unmarshal(data, &tags); err != nil { - return err - } - case '{': - // Unmarshal into map[string]string - tagMap := make(map[string]string) - if err := json.Unmarshal(data, &tagMap); err != nil { - return err - } - - // Convert to []Tag - for k, v := range tagMap { - tags = append(tags, Tag{k, v}) - } - default: - return ErrUnableToUnmarshalJSON - } - - *t = tags - return nil -} - -// https://docs.getsentry.com/hosted/clientdev/#building-the-json-packet -type Packet struct { - // Required - Message string `json:"message"` - - // Required, set automatically by Client.Send/Report via Packet.Init if blank - EventID string `json:"event_id"` - Project string `json:"project"` - Timestamp Timestamp `json:"timestamp"` - Level Severity `json:"level"` - Logger string `json:"logger"` - - // Optional - Platform string `json:"platform,omitempty"` - Culprit string `json:"culprit,omitempty"` - ServerName string `json:"server_name,omitempty"` - Release string `json:"release,omitempty"` - Environment string 
`json:"environment,omitempty"` - Tags Tags `json:"tags,omitempty"` - Modules map[string]string `json:"modules,omitempty"` - Fingerprint []string `json:"fingerprint,omitempty"` - Extra Extra `json:"extra,omitempty"` - - Interfaces []Interface `json:"-"` -} - -// NewPacket constructs a packet with the specified message and interfaces. -func NewPacket(message string, interfaces ...Interface) *Packet { - extra := Extra{} - setExtraDefaults(extra) - return &Packet{ - Message: message, - Interfaces: interfaces, - Extra: extra, - } -} - -// NewPacketWithExtra constructs a packet with the specified message, extra information, and interfaces. -func NewPacketWithExtra(message string, extra Extra, interfaces ...Interface) *Packet { - if extra == nil { - extra = Extra{} - } - setExtraDefaults(extra) - - return &Packet{ - Message: message, - Interfaces: interfaces, - Extra: extra, - } -} - -func setExtraDefaults(extra Extra) Extra { - extra["runtime.Version"] = runtime.Version() - extra["runtime.NumCPU"] = runtime.NumCPU() - extra["runtime.GOMAXPROCS"] = runtime.GOMAXPROCS(0) // 0 just returns the current value - extra["runtime.NumGoroutine"] = runtime.NumGoroutine() - return extra -} - -// Init initializes required fields in a packet. It is typically called by -// Client.Send/Report automatically. -func (packet *Packet) Init(project string) error { - if packet.Project == "" { - packet.Project = project - } - if packet.EventID == "" { - var err error - packet.EventID, err = uuid() - if err != nil { - return err - } - } - if time.Time(packet.Timestamp).IsZero() { - packet.Timestamp = Timestamp(time.Now()) - } - if packet.Level == "" { - packet.Level = ERROR - } - if packet.Logger == "" { - packet.Logger = "root" - } - if packet.ServerName == "" { - packet.ServerName = hostname - } - if packet.Platform == "" { - packet.Platform = "go" - } - - if packet.Culprit == "" { - for _, inter := range packet.Interfaces { - if c, ok := inter.(Culpriter); ok { - packet.Culprit = c.Culprit() - if packet.Culprit != "" { - break - } - } - } - } - - return nil -} - -func (packet *Packet) AddTags(tags map[string]string) { - for k, v := range tags { - packet.Tags = append(packet.Tags, Tag{k, v}) - } -} - -func uuid() (string, error) { - id := make([]byte, 16) - _, err := io.ReadFull(rand.Reader, id) - if err != nil { - return "", err - } - id[6] &= 0x0F // clear version - id[6] |= 0x40 // set version to 4 (random uuid) - id[8] &= 0x3F // clear variant - id[8] |= 0x80 // set to IETF variant - return hex.EncodeToString(id), nil -} - -func (packet *Packet) JSON() ([]byte, error) { - packetJSON, err := json.Marshal(packet) - if err != nil { - return nil, err - } - - interfaces := make(map[string]Interface, len(packet.Interfaces)) - for _, inter := range packet.Interfaces { - if inter != nil { - interfaces[inter.Class()] = inter - } - } - - if len(interfaces) > 0 { - interfaceJSON, err := json.Marshal(interfaces) - if err != nil { - return nil, err - } - packetJSON[len(packetJSON)-1] = ',' - packetJSON = append(packetJSON, interfaceJSON[1:]...) 
- } - - return packetJSON, nil -} - -type context struct { - user *User - http *Http - tags map[string]string -} - -func (c *context) setUser(u *User) { c.user = u } -func (c *context) setHttp(h *Http) { c.http = h } -func (c *context) setTags(t map[string]string) { - if c.tags == nil { - c.tags = make(map[string]string) - } - for k, v := range t { - c.tags[k] = v - } -} -func (c *context) clear() { - c.user = nil - c.http = nil - c.tags = nil -} - -// Return a list of interfaces to be used in appending with the rest -func (c *context) interfaces() []Interface { - len, i := 0, 0 - if c.user != nil { - len++ - } - if c.http != nil { - len++ - } - interfaces := make([]Interface, len) - if c.user != nil { - interfaces[i] = c.user - i++ - } - if c.http != nil { - interfaces[i] = c.http - i++ - } - return interfaces -} - -// The maximum number of packets that will be buffered waiting to be delivered. -// Packets will be dropped if the buffer is full. Used by NewClient. -var MaxQueueBuffer = 100 - -func newTransport() Transport { - t := &HTTPTransport{} - rootCAs, err := gocertifi.CACerts() - if err != nil { - log.Println("raven: failed to load root TLS certificates:", err) - } else { - t.Client = &http.Client{ - Transport: &http.Transport{ - Proxy: http.ProxyFromEnvironment, - TLSClientConfig: &tls.Config{RootCAs: rootCAs}, - }, - } - } - return t -} - -func newClient(tags map[string]string) *Client { - client := &Client{ - Transport: newTransport(), - Tags: tags, - context: &context{}, - sampleRate: 1.0, - queue: make(chan *outgoingPacket, MaxQueueBuffer), - } - client.SetDSN(os.Getenv("SENTRY_DSN")) - client.SetRelease(os.Getenv("SENTRY_RELEASE")) - client.SetEnvironment(os.Getenv("SENTRY_ENVIRONMENT")) - return client -} - -// New constructs a new Sentry client instance -func New(dsn string) (*Client, error) { - client := newClient(nil) - return client, client.SetDSN(dsn) -} - -// NewWithTags constructs a new Sentry client instance with default tags. -func NewWithTags(dsn string, tags map[string]string) (*Client, error) { - client := newClient(tags) - return client, client.SetDSN(dsn) -} - -// NewClient constructs a Sentry client and spawns a background goroutine to -// handle packets sent by Client.Report. -// -// Deprecated: use New and NewWithTags instead -func NewClient(dsn string, tags map[string]string) (*Client, error) { - client := newClient(tags) - return client, client.SetDSN(dsn) -} - -// Client encapsulates a connection to a Sentry server. It must be initialized -// by calling NewClient. Modification of fields concurrently with Send or after -// calling Report for the first time is not thread-safe. -type Client struct { - Tags map[string]string - - Transport Transport - - // DropHandler is called when a packet is dropped because the buffer is full. - DropHandler func(*Packet) - - // Context that will get appending to all packets - context *context - - mu sync.RWMutex - url string - projectID string - authHeader string - release string - environment string - sampleRate float32 - - // default logger name (leave empty for 'root') - defaultLoggerName string - - includePaths []string - ignoreErrorsRegexp *regexp.Regexp - queue chan *outgoingPacket - - // A WaitGroup to keep track of all currently in-progress captures - // This is intended to be used with Client.Wait() to assure that - // all messages have been transported before exiting the process. 
- wg sync.WaitGroup - - // A Once to track only starting up the background worker once - start sync.Once -} - -// Initialize a default *Client instance -var DefaultClient = newClient(nil) - -func (c *Client) SetIgnoreErrors(errs []string) error { - joinedRegexp := strings.Join(errs, "|") - r, err := regexp.Compile(joinedRegexp) - if err != nil { - return fmt.Errorf("failed to compile regexp %q for %q: %v", joinedRegexp, errs, err) - } - - c.mu.Lock() - c.ignoreErrorsRegexp = r - c.mu.Unlock() - return nil -} - -func (c *Client) shouldExcludeErr(errStr string) bool { - c.mu.RLock() - defer c.mu.RUnlock() - return c.ignoreErrorsRegexp != nil && c.ignoreErrorsRegexp.MatchString(errStr) -} - -func SetIgnoreErrors(errs ...string) error { - return DefaultClient.SetIgnoreErrors(errs) -} - -// SetDSN updates a client with a new DSN. It safe to call after and -// concurrently with calls to Report and Send. -func (client *Client) SetDSN(dsn string) error { - if dsn == "" { - return nil - } - - client.mu.Lock() - defer client.mu.Unlock() - - uri, err := url.Parse(dsn) - if err != nil { - return err - } - - if uri.User == nil { - return ErrMissingUser - } - publicKey := uri.User.Username() - secretKey, hasSecretKey := uri.User.Password() - uri.User = nil - - if idx := strings.LastIndex(uri.Path, "/"); idx != -1 { - client.projectID = uri.Path[idx+1:] - uri.Path = uri.Path[:idx+1] + "api/" + client.projectID + "/store/" - } - if client.projectID == "" { - return ErrMissingProjectID - } - - client.url = uri.String() - - if hasSecretKey { - client.authHeader = fmt.Sprintf("Sentry sentry_version=4, sentry_key=%s, sentry_secret=%s", publicKey, secretKey) - } else { - client.authHeader = fmt.Sprintf("Sentry sentry_version=4, sentry_key=%s", publicKey) - } - - return nil -} - -// Sets the DSN for the default *Client instance -func SetDSN(dsn string) error { return DefaultClient.SetDSN(dsn) } - -// SetRelease sets the "release" tag. -func (client *Client) SetRelease(release string) { - client.mu.Lock() - defer client.mu.Unlock() - client.release = release -} - -// SetEnvironment sets the "environment" tag. -func (client *Client) SetEnvironment(environment string) { - client.mu.Lock() - defer client.mu.Unlock() - client.environment = environment -} - -// SetDefaultLoggerName sets the default logger name. 
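A note on the `SetDSN` logic in this hunk: it splits the key pair out of the DSN's user info and rewrites the path so that project N is posted to `/api/N/store/`. A standalone sketch of that transformation, using a made-up DSN (`sentry.example.com` and project `42` are placeholders):

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// Hypothetical DSN, for illustration only.
	dsn := "https://public:secret@sentry.example.com/42"

	uri, err := url.Parse(dsn)
	if err != nil {
		panic(err)
	}
	publicKey := uri.User.Username()
	secretKey, _ := uri.User.Password()
	uri.User = nil

	// Same path rewrite as SetDSN: ".../42" becomes ".../api/42/store/".
	idx := strings.LastIndex(uri.Path, "/")
	projectID := uri.Path[idx+1:]
	uri.Path = uri.Path[:idx+1] + "api/" + projectID + "/store/"

	fmt.Println(uri.String())
	// https://sentry.example.com/api/42/store/
	fmt.Printf("Sentry sentry_version=4, sentry_key=%s, sentry_secret=%s\n", publicKey, secretKey)
}
```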
-func (client *Client) SetDefaultLoggerName(name string) { - client.mu.Lock() - defer client.mu.Unlock() - client.defaultLoggerName = name -} - -// SetSampleRate sets how much sampling we want on client side -func (client *Client) SetSampleRate(rate float32) error { - client.mu.Lock() - defer client.mu.Unlock() - - if rate < 0 || rate > 1 { - return ErrInvalidSampleRate - } - client.sampleRate = rate - return nil -} - -// SetRelease sets the "release" tag on the default *Client -func SetRelease(release string) { DefaultClient.SetRelease(release) } - -// SetEnvironment sets the "environment" tag on the default *Client -func SetEnvironment(environment string) { DefaultClient.SetEnvironment(environment) } - -// SetDefaultLoggerName sets the "defaultLoggerName" on the default *Client -func SetDefaultLoggerName(name string) { - DefaultClient.SetDefaultLoggerName(name) -} - -// SetSampleRate sets the "sample rate" on the degault *Client -func SetSampleRate(rate float32) error { return DefaultClient.SetSampleRate(rate) } - -func (client *Client) worker() { - for outgoingPacket := range client.queue { - - client.mu.RLock() - url, authHeader := client.url, client.authHeader - client.mu.RUnlock() - - outgoingPacket.ch <- client.Transport.Send(url, authHeader, outgoingPacket.packet) - client.wg.Done() - } -} - -// Capture asynchronously delivers a packet to the Sentry server. It is a no-op -// when client is nil. A channel is provided if it is important to check for a -// send's success. -func (client *Client) Capture(packet *Packet, captureTags map[string]string) (eventID string, ch chan error) { - ch = make(chan error, 1) - - if client == nil { - // return a chan that always returns nil when the caller receives from it - close(ch) - return - } - - if client.sampleRate < 1.0 && mrand.Float32() > client.sampleRate { - return - } - - if packet == nil { - close(ch) - return - } - - if client.shouldExcludeErr(packet.Message) { - return - } - - // Keep track of all running Captures so that we can wait for them all to finish - // *Must* call client.wg.Done() on any path that indicates that an event was - // finished being acted upon, whether success or failure - client.wg.Add(1) - - // Merge capture tags and client tags - packet.AddTags(captureTags) - packet.AddTags(client.Tags) - - // Initialize any required packet fields - client.mu.RLock() - packet.AddTags(client.context.tags) - projectID := client.projectID - release := client.release - environment := client.environment - defaultLoggerName := client.defaultLoggerName - client.mu.RUnlock() - - // set the global logger name on the packet if we must - if packet.Logger == "" && defaultLoggerName != "" { - packet.Logger = defaultLoggerName - } - - err := packet.Init(projectID) - if err != nil { - ch <- err - client.wg.Done() - return - } - - if packet.Release == "" { - packet.Release = release - } - - if packet.Environment == "" { - packet.Environment = environment - } - - outgoingPacket := &outgoingPacket{packet, ch} - - // Lazily start background worker until we - // do our first write into the queue. - client.start.Do(func() { - go client.worker() - }) - - select { - case client.queue <- outgoingPacket: - default: - // Send would block, drop the packet - if client.DropHandler != nil { - client.DropHandler(packet) - } - ch <- ErrPacketDropped - client.wg.Done() - } - - return packet.EventID, ch -} - -// Capture asynchronously delivers a packet to the Sentry server with the default *Client. -// It is a no-op when client is nil. 
A channel is provided if it is important to check for a -// send's success. -func Capture(packet *Packet, captureTags map[string]string) (eventID string, ch chan error) { - return DefaultClient.Capture(packet, captureTags) -} - -// CaptureMessage formats and delivers a string message to the Sentry server. -func (client *Client) CaptureMessage(message string, tags map[string]string, interfaces ...Interface) string { - if client == nil { - return "" - } - - if client.shouldExcludeErr(message) { - return "" - } - - packet := NewPacket(message, append(append(interfaces, client.context.interfaces()...), &Message{message, nil})...) - eventID, _ := client.Capture(packet, tags) - - return eventID -} - -// CaptureMessage formats and delivers a string message to the Sentry server with the default *Client -func CaptureMessage(message string, tags map[string]string, interfaces ...Interface) string { - return DefaultClient.CaptureMessage(message, tags, interfaces...) -} - -// CaptureMessageAndWait is identical to CaptureMessage except it blocks and waits for the message to be sent. -func (client *Client) CaptureMessageAndWait(message string, tags map[string]string, interfaces ...Interface) string { - if client == nil { - return "" - } - - if client.shouldExcludeErr(message) { - return "" - } - - packet := NewPacket(message, append(append(interfaces, client.context.interfaces()...), &Message{message, nil})...) - eventID, ch := client.Capture(packet, tags) - if eventID != "" { - <-ch - } - - return eventID -} - -// CaptureMessageAndWait is identical to CaptureMessage except it blocks and waits for the message to be sent. -func CaptureMessageAndWait(message string, tags map[string]string, interfaces ...Interface) string { - return DefaultClient.CaptureMessageAndWait(message, tags, interfaces...) -} - -// CaptureErrors formats and delivers an error to the Sentry server. -// Adds a stacktrace to the packet, excluding the call to this method. -func (client *Client) CaptureError(err error, tags map[string]string, interfaces ...Interface) string { - if client == nil { - return "" - } - - if err == nil { - return "" - } - - if client.shouldExcludeErr(err.Error()) { - return "" - } - - extra := extractExtra(err) - cause := pkgErrors.Cause(err) - - packet := NewPacketWithExtra(err.Error(), extra, append(append(interfaces, client.context.interfaces()...), NewException(cause, GetOrNewStacktrace(cause, 1, 3, client.includePaths)))...) - eventID, _ := client.Capture(packet, tags) - - return eventID -} - -// CaptureErrors formats and delivers an error to the Sentry server using the default *Client. -// Adds a stacktrace to the packet, excluding the call to this method. -func CaptureError(err error, tags map[string]string, interfaces ...Interface) string { - return DefaultClient.CaptureError(err, tags, interfaces...) -} - -// CaptureErrorAndWait is identical to CaptureError, except it blocks and assures that the event was sent -func (client *Client) CaptureErrorAndWait(err error, tags map[string]string, interfaces ...Interface) string { - if client == nil { - return "" - } - - if client.shouldExcludeErr(err.Error()) { - return "" - } - - extra := extractExtra(err) - cause := pkgErrors.Cause(err) - - packet := NewPacketWithExtra(err.Error(), extra, append(append(interfaces, client.context.interfaces()...), NewException(cause, GetOrNewStacktrace(cause, 1, 3, client.includePaths)))...) 
- eventID, ch := client.Capture(packet, tags) - if eventID != "" { - <-ch - } - - return eventID -} - -// CaptureErrorAndWait is identical to CaptureError, except it blocks and assures that the event was sent -func CaptureErrorAndWait(err error, tags map[string]string, interfaces ...Interface) string { - return DefaultClient.CaptureErrorAndWait(err, tags, interfaces...) -} - -// CapturePanic calls f and then recovers and reports a panic to the Sentry server if it occurs. -// If an error is captured, both the error and the reported Sentry error ID are returned. -func (client *Client) CapturePanic(f func(), tags map[string]string, interfaces ...Interface) (err interface{}, errorID string) { - // Note: This doesn't need to check for client, because we still want to go through the defer/recover path - // Down the line, Capture will be noop'd, so while this does a _tiny_ bit of overhead constructing the - // *Packet just to be thrown away, this should not be the normal case. Could be refactored to - // be completely noop though if we cared. - defer func() { - var packet *Packet - err = recover() - switch rval := err.(type) { - case nil: - return - case error: - if client.shouldExcludeErr(rval.Error()) { - return - } - packet = NewPacket(rval.Error(), append(append(interfaces, client.context.interfaces()...), NewException(rval, NewStacktrace(2, 3, client.includePaths)))...) - default: - rvalStr := fmt.Sprint(rval) - if client.shouldExcludeErr(rvalStr) { - return - } - packet = NewPacket(rvalStr, append(append(interfaces, client.context.interfaces()...), NewException(errors.New(rvalStr), NewStacktrace(2, 3, client.includePaths)))...) - } - - errorID, _ = client.Capture(packet, tags) - }() - - f() - return -} - -// CapturePanic calls f and then recovers and reports a panic to the Sentry server if it occurs. -// If an error is captured, both the error and the reported Sentry error ID are returned. -func CapturePanic(f func(), tags map[string]string, interfaces ...Interface) (interface{}, string) { - return DefaultClient.CapturePanic(f, tags, interfaces...) -} - -// CapturePanicAndWait is identical to CaptureError, except it blocks and assures that the event was sent -func (client *Client) CapturePanicAndWait(f func(), tags map[string]string, interfaces ...Interface) (err interface{}, errorID string) { - // Note: This doesn't need to check for client, because we still want to go through the defer/recover path - // Down the line, Capture will be noop'd, so while this does a _tiny_ bit of overhead constructing the - // *Packet just to be thrown away, this should not be the normal case. Could be refactored to - // be completely noop though if we cared. - defer func() { - var packet *Packet - err = recover() - switch rval := err.(type) { - case nil: - return - case error: - if client.shouldExcludeErr(rval.Error()) { - return - } - packet = NewPacket(rval.Error(), append(append(interfaces, client.context.interfaces()...), NewException(rval, NewStacktrace(2, 3, client.includePaths)))...) - default: - rvalStr := fmt.Sprint(rval) - if client.shouldExcludeErr(rvalStr) { - return - } - packet = NewPacket(rvalStr, append(append(interfaces, client.context.interfaces()...), NewException(errors.New(rvalStr), NewStacktrace(2, 3, client.includePaths)))...) 
- } - - var ch chan error - errorID, ch = client.Capture(packet, tags) - if errorID != "" { - <-ch - } - }() - - f() - return -} - -// CapturePanicAndWait is identical to CaptureError, except it blocks and assures that the event was sent -func CapturePanicAndWait(f func(), tags map[string]string, interfaces ...Interface) (interface{}, string) { - return DefaultClient.CapturePanicAndWait(f, tags, interfaces...) -} - -func (client *Client) Close() { - close(client.queue) -} - -func Close() { DefaultClient.Close() } - -// Wait blocks and waits for all events to finish being sent to Sentry server -func (client *Client) Wait() { - client.wg.Wait() -} - -// Wait blocks and waits for all events to finish being sent to Sentry server -func Wait() { DefaultClient.Wait() } - -func (client *Client) URL() string { - client.mu.RLock() - defer client.mu.RUnlock() - - return client.url -} - -func URL() string { return DefaultClient.URL() } - -func (client *Client) ProjectID() string { - client.mu.RLock() - defer client.mu.RUnlock() - - return client.projectID -} - -func ProjectID() string { return DefaultClient.ProjectID() } - -func (client *Client) Release() string { - client.mu.RLock() - defer client.mu.RUnlock() - - return client.release -} - -func Release() string { return DefaultClient.Release() } - -func IncludePaths() []string { return DefaultClient.IncludePaths() } - -func (client *Client) IncludePaths() []string { - client.mu.RLock() - defer client.mu.RUnlock() - - return client.includePaths -} - -func SetIncludePaths(p []string) { DefaultClient.SetIncludePaths(p) } - -func (client *Client) SetIncludePaths(p []string) { - client.mu.Lock() - defer client.mu.Unlock() - - client.includePaths = p -} - -func (c *Client) SetUserContext(u *User) { - c.mu.Lock() - defer c.mu.Unlock() - c.context.setUser(u) -} - -func (c *Client) SetHttpContext(h *Http) { - c.mu.Lock() - defer c.mu.Unlock() - c.context.setHttp(h) -} - -func (c *Client) SetTagsContext(t map[string]string) { - c.mu.Lock() - defer c.mu.Unlock() - c.context.setTags(t) -} - -func (c *Client) ClearContext() { - c.mu.Lock() - defer c.mu.Unlock() - c.context.clear() -} - -func SetUserContext(u *User) { DefaultClient.SetUserContext(u) } -func SetHttpContext(h *Http) { DefaultClient.SetHttpContext(h) } -func SetTagsContext(t map[string]string) { DefaultClient.SetTagsContext(t) } -func ClearContext() { DefaultClient.ClearContext() } - -// HTTPTransport is the default transport, delivering packets to Sentry via the -// HTTP API. 
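Taken together, the capture surface deleted in this file (`CaptureError*`, `CapturePanic*`, `Wait`) was driven roughly as in the sketch below. This is illustrative only: `doWork` is a placeholder, and the DSN comes from `SENTRY_DSN` exactly as `newClient` above reads it.

```go
package main

import (
	"errors"

	raven "github.com/getsentry/raven-go"
)

func doWork() error { return errors.New("example failure") } // placeholder

func main() {
	// DefaultClient reads SENTRY_DSN / SENTRY_RELEASE / SENTRY_ENVIRONMENT
	// from the environment (see newClient above).
	raven.CapturePanic(func() {
		if err := doWork(); err != nil {
			// Blocks until the event has been sent or dropped.
			raven.CaptureErrorAndWait(err, map[string]string{"component": "example"})
		}
	}, nil)

	// Wait for any asynchronously captured packets before exiting.
	raven.Wait()
}
```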
-type HTTPTransport struct { - *http.Client -} - -func (t *HTTPTransport) Send(url, authHeader string, packet *Packet) error { - if url == "" { - return nil - } - - body, contentType, err := serializedPacket(packet) - if err != nil { - return fmt.Errorf("error serializing packet: %v", err) - } - req, err := http.NewRequest("POST", url, body) - if err != nil { - return fmt.Errorf("can't create new request: %v", err) - } - req.Header.Set("X-Sentry-Auth", authHeader) - req.Header.Set("User-Agent", userAgent) - req.Header.Set("Content-Type", contentType) - res, err := t.Do(req) - if err != nil { - return err - } - io.Copy(ioutil.Discard, res.Body) - res.Body.Close() - if res.StatusCode != 200 { - return fmt.Errorf("raven: got http status %d - x-sentry-error: %s", res.StatusCode, res.Header.Get("X-Sentry-Error")) - } - return nil -} - -func serializedPacket(packet *Packet) (io.Reader, string, error) { - packetJSON, err := packet.JSON() - if err != nil { - return nil, "", fmt.Errorf("error marshaling packet %+v to JSON: %v", packet, err) - } - - // Only deflate/base64 the packet if it is bigger than 1KB, as there is - // overhead. - if len(packetJSON) > 1000 { - buf := &bytes.Buffer{} - b64 := base64.NewEncoder(base64.StdEncoding, buf) - deflate, _ := zlib.NewWriterLevel(b64, zlib.BestCompression) - deflate.Write(packetJSON) - deflate.Close() - b64.Close() - return buf, "application/octet-stream", nil - } - return bytes.NewReader(packetJSON), "application/json", nil -} - -var hostname string - -func init() { - hostname, _ = os.Hostname() -} diff --git a/vendor/github.com/getsentry/raven-go/errors.go b/vendor/github.com/getsentry/raven-go/errors.go deleted file mode 100644 index 5e57270436d..00000000000 --- a/vendor/github.com/getsentry/raven-go/errors.go +++ /dev/null @@ -1,60 +0,0 @@ -package raven - -type causer interface { - Cause() error -} - -type errWrappedWithExtra struct { - err error - extraInfo map[string]interface{} -} - -func (ewx *errWrappedWithExtra) Error() string { - return ewx.err.Error() -} - -func (ewx *errWrappedWithExtra) Cause() error { - return ewx.err -} - -func (ewx *errWrappedWithExtra) ExtraInfo() Extra { - return ewx.extraInfo -} - -// Adds extra data to an error before reporting to Sentry -func WrapWithExtra(err error, extraInfo map[string]interface{}) error { - return &errWrappedWithExtra{ - err: err, - extraInfo: extraInfo, - } -} - -type ErrWithExtra interface { - Error() string - Cause() error - ExtraInfo() Extra -} - -// Iteratively fetches all the Extra data added to an error, -// and it's underlying errors. Extra data defined first is -// respected, and is not overridden when extracting. 
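For context on `WrapWithExtra` and the `ErrWithExtra` interface above: extra fields attached to an error this way are merged into `Packet.Extra` when the error is later captured. A minimal sketch, with hypothetical field names:

```go
package main

import (
	"errors"

	raven "github.com/getsentry/raven-go"
)

func main() {
	base := errors.New("payment declined") // placeholder error

	// Attach structured context; it is merged into the packet's extra data
	// when the error is captured.
	err := raven.WrapWithExtra(base, map[string]interface{}{
		"order_id": 1234, // hypothetical field
		"retries":  3,
	})

	raven.CaptureError(err, nil)
	raven.Wait()
}
```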
-func extractExtra(err error) Extra { - extra := Extra{} - - currentErr := err - for currentErr != nil { - if errWithExtra, ok := currentErr.(ErrWithExtra); ok { - for k, v := range errWithExtra.ExtraInfo() { - extra[k] = v - } - } - - if errWithCause, ok := currentErr.(causer); ok { - currentErr = errWithCause.Cause() - } else { - currentErr = nil - } - } - - return extra -} diff --git a/vendor/github.com/getsentry/raven-go/exception.go b/vendor/github.com/getsentry/raven-go/exception.go deleted file mode 100644 index 552eaad128c..00000000000 --- a/vendor/github.com/getsentry/raven-go/exception.go +++ /dev/null @@ -1,50 +0,0 @@ -package raven - -import ( - "reflect" - "regexp" -) - -var errorMsgPattern = regexp.MustCompile(`\A(\w+): (.+)\z`) - -func NewException(err error, stacktrace *Stacktrace) *Exception { - msg := err.Error() - ex := &Exception{ - Stacktrace: stacktrace, - Value: msg, - Type: reflect.TypeOf(err).String(), - } - if m := errorMsgPattern.FindStringSubmatch(msg); m != nil { - ex.Module, ex.Value = m[1], m[2] - } - return ex -} - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#failure-interfaces -type Exception struct { - // Required - Value string `json:"value"` - - // Optional - Type string `json:"type,omitempty"` - Module string `json:"module,omitempty"` - Stacktrace *Stacktrace `json:"stacktrace,omitempty"` -} - -func (e *Exception) Class() string { return "exception" } - -func (e *Exception) Culprit() string { - if e.Stacktrace == nil { - return "" - } - return e.Stacktrace.Culprit() -} - -// Exceptions allows for chained errors -// https://docs.sentry.io/clientdev/interfaces/exception/ -type Exceptions struct { - // Required - Values []*Exception `json:"values"` -} - -func (es Exceptions) Class() string { return "exception" } diff --git a/vendor/github.com/getsentry/raven-go/http.go b/vendor/github.com/getsentry/raven-go/http.go deleted file mode 100644 index ae8f47234c1..00000000000 --- a/vendor/github.com/getsentry/raven-go/http.go +++ /dev/null @@ -1,99 +0,0 @@ -package raven - -import ( - "errors" - "fmt" - "net" - "net/http" - "net/url" - "runtime/debug" - "strings" -) - -func NewHttp(req *http.Request) *Http { - proto := "http" - if req.TLS != nil || req.Header.Get("X-Forwarded-Proto") == "https" { - proto = "https" - } - h := &Http{ - Method: req.Method, - Cookies: req.Header.Get("Cookie"), - Query: sanitizeQuery(req.URL.Query()).Encode(), - URL: proto + "://" + req.Host + req.URL.Path, - Headers: make(map[string]string, len(req.Header)), - } - if addr, port, err := net.SplitHostPort(req.RemoteAddr); err == nil { - h.Env = map[string]string{"REMOTE_ADDR": addr, "REMOTE_PORT": port} - } - for k, v := range req.Header { - h.Headers[k] = strings.Join(v, ",") - } - h.Headers["Host"] = req.Host - return h -} - -var querySecretFields = []string{"password", "passphrase", "passwd", "secret"} - -func sanitizeQuery(query url.Values) url.Values { - for _, keyword := range querySecretFields { - for field := range query { - if strings.Contains(field, keyword) { - query[field] = []string{"********"} - } - } - } - return query -} - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#context-interfaces -type Http struct { - // Required - URL string `json:"url"` - Method string `json:"method"` - Query string `json:"query_string,omitempty"` - - // Optional - Cookies string `json:"cookies,omitempty"` - Headers map[string]string `json:"headers,omitempty"` - Env map[string]string `json:"env,omitempty"` - - // Must be either a string or map[string]string - Data 
interface{} `json:"data,omitempty"` -} - -func (h *Http) Class() string { return "request" } - -// Recovery handler to wrap the stdlib net/http Mux. -// Example: -// http.HandleFunc("/", raven.RecoveryHandler(func(w http.ResponseWriter, r *http.Request) { -// ... -// })) -func RecoveryHandler(handler func(http.ResponseWriter, *http.Request)) func(http.ResponseWriter, *http.Request) { - return Recoverer(http.HandlerFunc(handler)).ServeHTTP -} - -// Recovery handler to wrap the stdlib net/http Mux. -// Example: -// mux := http.NewServeMux -// ... -// http.Handle("/", raven.Recoverer(mux)) -func Recoverer(handler http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - defer func() { - if rval := recover(); rval != nil { - debug.PrintStack() - rvalStr := fmt.Sprint(rval) - var packet *Packet - if err, ok := rval.(error); ok { - packet = NewPacket(rvalStr, NewException(errors.New(rvalStr), GetOrNewStacktrace(err, 2, 3, nil)), NewHttp(r)) - } else { - packet = NewPacket(rvalStr, NewException(errors.New(rvalStr), NewStacktrace(2, 3, nil)), NewHttp(r)) - } - Capture(packet, nil) - w.WriteHeader(http.StatusInternalServerError) - } - }() - - handler.ServeHTTP(w, r) - }) -} diff --git a/vendor/github.com/getsentry/raven-go/interfaces.go b/vendor/github.com/getsentry/raven-go/interfaces.go deleted file mode 100644 index a05dc3de472..00000000000 --- a/vendor/github.com/getsentry/raven-go/interfaces.go +++ /dev/null @@ -1,49 +0,0 @@ -package raven - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#message-interface -type Message struct { - // Required - Message string `json:"message"` - - // Optional - Params []interface{} `json:"params,omitempty"` -} - -func (m *Message) Class() string { return "logentry" } - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#template-interface -type Template struct { - // Required - Filename string `json:"filename"` - Lineno int `json:"lineno"` - ContextLine string `json:"context_line"` - - // Optional - PreContext []string `json:"pre_context,omitempty"` - PostContext []string `json:"post_context,omitempty"` - AbsolutePath string `json:"abs_path,omitempty"` -} - -func (t *Template) Class() string { return "template" } - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#context-interfaces -type User struct { - // All fields are optional - ID string `json:"id,omitempty"` - Username string `json:"username,omitempty"` - Email string `json:"email,omitempty"` - IP string `json:"ip_address,omitempty"` -} - -func (h *User) Class() string { return "user" } - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#context-interfaces -type Query struct { - // Required - Query string `json:"query"` - - // Optional - Engine string `json:"engine,omitempty"` -} - -func (q *Query) Class() string { return "query" } diff --git a/vendor/github.com/getsentry/raven-go/runtests.sh b/vendor/github.com/getsentry/raven-go/runtests.sh deleted file mode 100644 index 9ed279c966e..00000000000 --- a/vendor/github.com/getsentry/raven-go/runtests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -go test -race ./... -go test -cover ./... -go test -v ./... diff --git a/vendor/github.com/getsentry/raven-go/stacktrace.go b/vendor/github.com/getsentry/raven-go/stacktrace.go deleted file mode 100644 index bc302ba119f..00000000000 --- a/vendor/github.com/getsentry/raven-go/stacktrace.go +++ /dev/null @@ -1,277 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// Some code from the runtime/debug package of the Go standard library. - -package raven - -import ( - "bytes" - "go/build" - "io/ioutil" - "path/filepath" - "runtime" - "strings" - "sync" - - "github.com/pkg/errors" -) - -// https://docs.getsentry.com/hosted/clientdev/interfaces/#failure-interfaces -type Stacktrace struct { - // Required - Frames []*StacktraceFrame `json:"frames"` -} - -func (s *Stacktrace) Class() string { return "stacktrace" } - -func (s *Stacktrace) Culprit() string { - for i := len(s.Frames) - 1; i >= 0; i-- { - frame := s.Frames[i] - if frame.InApp == true && frame.Module != "" && frame.Function != "" { - return frame.Module + "." + frame.Function - } - } - return "" -} - -type StacktraceFrame struct { - // At least one required - Filename string `json:"filename,omitempty"` - Function string `json:"function,omitempty"` - Module string `json:"module,omitempty"` - - // Optional - Lineno int `json:"lineno,omitempty"` - Colno int `json:"colno,omitempty"` - AbsolutePath string `json:"abs_path,omitempty"` - ContextLine string `json:"context_line,omitempty"` - PreContext []string `json:"pre_context,omitempty"` - PostContext []string `json:"post_context,omitempty"` - InApp bool `json:"in_app"` -} - -// Try to get stacktrace from err as an interface of github.com/pkg/errors, or else NewStacktrace() -func GetOrNewStacktrace(err error, skip int, context int, appPackagePrefixes []string) *Stacktrace { - stacktracer, errHasStacktrace := err.(interface { - StackTrace() errors.StackTrace - }) - if errHasStacktrace { - var frames []*StacktraceFrame - for _, f := range stacktracer.StackTrace() { - pc := uintptr(f) - 1 - fn := runtime.FuncForPC(pc) - var fName string - var file string - var line int - if fn != nil { - file, line = fn.FileLine(pc) - fName = fn.Name() - } else { - file = "unknown" - fName = "unknown" - } - frame := NewStacktraceFrame(pc, fName, file, line, context, appPackagePrefixes) - if frame != nil { - frames = append([]*StacktraceFrame{frame}, frames...) - } - } - return &Stacktrace{Frames: frames} - } else { - return NewStacktrace(skip+1, context, appPackagePrefixes) - } -} - -// Intialize and populate a new stacktrace, skipping skip frames. -// -// context is the number of surrounding lines that should be included for context. -// Setting context to 3 would try to get seven lines. Setting context to -1 returns -// one line with no surrounding context, and 0 returns no context. -// -// appPackagePrefixes is a list of prefixes used to check whether a package should -// be considered "in app". 
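As `GetOrNewStacktrace` above shows, an error that already exposes `StackTrace() errors.StackTrace` (as errors from `github.com/pkg/errors` do) keeps the stack recorded where it was wrapped; anything else gets a stack captured at the reporting call site. A small sketch of the first case, with a placeholder failure:

```go
package main

import (
	"io"

	raven "github.com/getsentry/raven-go"
	"github.com/pkg/errors"
)

func loadConfig() error {
	// errors.Wrap records the stack here; GetOrNewStacktrace reuses it
	// instead of synthesising one inside CaptureError.
	return errors.Wrap(io.ErrUnexpectedEOF, "loading config") // placeholder failure
}

func main() {
	if err := loadConfig(); err != nil {
		raven.CaptureErrorAndWait(err, nil)
	}
}
```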
-func NewStacktrace(skip int, context int, appPackagePrefixes []string) *Stacktrace { - var frames []*StacktraceFrame - - callerPcs := make([]uintptr, 100) - numCallers := runtime.Callers(skip+2, callerPcs) - - // If there are no callers, the entire stacktrace is nil - if numCallers == 0 { - return nil - } - - callersFrames := runtime.CallersFrames(callerPcs) - - for { - fr, more := callersFrames.Next() - if fr.Func != nil { - frame := NewStacktraceFrame(fr.PC, fr.Function, fr.File, fr.Line, context, appPackagePrefixes) - if frame != nil { - frames = append(frames, frame) - } - } - if !more { - break - } - } - // If there are no frames, the entire stacktrace is nil - if len(frames) == 0 { - return nil - } - // Optimize the path where there's only 1 frame - if len(frames) == 1 { - return &Stacktrace{frames} - } - // Sentry wants the frames with the oldest first, so reverse them - for i, j := 0, len(frames)-1; i < j; i, j = i+1, j-1 { - frames[i], frames[j] = frames[j], frames[i] - } - return &Stacktrace{frames} -} - -// Build a single frame using data returned from runtime.Caller. -// -// context is the number of surrounding lines that should be included for context. -// Setting context to 3 would try to get seven lines. Setting context to -1 returns -// one line with no surrounding context, and 0 returns no context. -// -// appPackagePrefixes is a list of prefixes used to check whether a package should -// be considered "in app". -func NewStacktraceFrame(pc uintptr, fName, file string, line, context int, appPackagePrefixes []string) *StacktraceFrame { - frame := &StacktraceFrame{AbsolutePath: file, Filename: trimPath(file), Lineno: line, InApp: false} - frame.Module, frame.Function = functionName(fName) - - // `runtime.goexit` is effectively a placeholder that comes from - // runtime/asm_amd64.s and is meaningless. - if frame.Module == "runtime" && frame.Function == "goexit" { - return nil - } - - if frame.Module == "main" { - frame.InApp = true - } else { - for _, prefix := range appPackagePrefixes { - if strings.HasPrefix(frame.Module, prefix) && !strings.Contains(frame.Module, "vendor") && !strings.Contains(frame.Module, "third_party") { - frame.InApp = true - } - } - } - - if context > 0 { - contextLines, lineIdx := sourceCodeLoader.Load(file, line, context) - if len(contextLines) > 0 { - for i, line := range contextLines { - switch { - case i < lineIdx: - frame.PreContext = append(frame.PreContext, string(line)) - case i == lineIdx: - frame.ContextLine = string(line) - default: - frame.PostContext = append(frame.PostContext, string(line)) - } - } - } - } else if context == -1 { - contextLine, _ := sourceCodeLoader.Load(file, line, 0) - if len(contextLine) > 0 { - frame.ContextLine = string(contextLine[0]) - } - } - return frame -} - -// Retrieve the name of the package and function containing the PC. 
-func functionName(fName string) (pack string, name string) { - name = fName - // We get this: - // runtime/debug.*T·ptrmethod - // and want this: - // pack = runtime/debug - // name = *T.ptrmethod - if idx := strings.LastIndex(name, "."); idx != -1 { - pack = name[:idx] - name = name[idx+1:] - } - name = strings.Replace(name, "·", ".", -1) - return -} - -type SourceCodeLoader interface { - Load(filename string, line, context int) ([][]byte, int) -} - -var sourceCodeLoader SourceCodeLoader = &fsLoader{cache: make(map[string][][]byte)} - -func SetSourceCodeLoader(loader SourceCodeLoader) { - sourceCodeLoader = loader -} - -type fsLoader struct { - mu sync.Mutex - cache map[string][][]byte -} - -func (fs *fsLoader) Load(filename string, line, context int) ([][]byte, int) { - fs.mu.Lock() - defer fs.mu.Unlock() - lines, ok := fs.cache[filename] - if !ok { - data, err := ioutil.ReadFile(filename) - if err != nil { - // cache errors as nil slice: code below handles it correctly - // otherwise when missing the source or running as a different user, we try - // reading the file on each error which is unnecessary - fs.cache[filename] = nil - return nil, 0 - } - lines = bytes.Split(data, []byte{'\n'}) - fs.cache[filename] = lines - } - - if lines == nil { - // cached error from ReadFile: return no lines - return nil, 0 - } - - line-- // stack trace lines are 1-indexed - start := line - context - var idx int - if start < 0 { - start = 0 - idx = line - } else { - idx = context - } - end := line + context + 1 - if line >= len(lines) { - return nil, 0 - } - if end > len(lines) { - end = len(lines) - } - return lines[start:end], idx -} - -var trimPaths []string - -// Try to trim the GOROOT or GOPATH prefix off of a filename -func trimPath(filename string) string { - for _, prefix := range trimPaths { - if trimmed := strings.TrimPrefix(filename, prefix); len(trimmed) < len(filename) { - return trimmed - } - } - return filename -} - -func init() { - // Collect all source directories, and make sure they - // end in a trailing "separator" - for _, prefix := range build.Default.SrcDirs() { - if prefix[len(prefix)-1] != filepath.Separator { - prefix += string(filepath.Separator) - } - trimPaths = append(trimPaths, prefix) - } -} diff --git a/vendor/github.com/getsentry/raven-go/writer.go b/vendor/github.com/getsentry/raven-go/writer.go deleted file mode 100644 index 61f7a91088e..00000000000 --- a/vendor/github.com/getsentry/raven-go/writer.go +++ /dev/null @@ -1,20 +0,0 @@ -package raven - -type Writer struct { - Client *Client - Level Severity - Logger string // Logger name reported to Sentry -} - -// Write formats the byte slice p into a string, and sends a message to -// Sentry at the severity level indicated by the Writer w. 
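The `Writer` type in this file implements `io.Writer`, so it can sit behind the standard library logger; a sketch, where the logger name and level are arbitrary example values:

```go
package main

import (
	"log"

	raven "github.com/getsentry/raven-go"
)

func main() {
	// Every log.Print* call becomes a Sentry message at WARNING level.
	// The logger name "stdlib" is an arbitrary example.
	sentryLog := log.New(&raven.Writer{
		Client: raven.DefaultClient,
		Level:  raven.WARNING,
		Logger: "stdlib",
	}, "", log.LstdFlags)

	sentryLog.Println("disk space low") // placeholder message
	raven.Wait()
}
```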
-func (w *Writer) Write(p []byte) (int, error) { - message := string(p) - - packet := NewPacket(message, &Message{message, nil}) - packet.Level = w.Level - packet.Logger = w.Logger - w.Client.Capture(packet, nil) - - return len(p), nil -} diff --git a/vendor/github.com/go-redis/redis/v8/.gitignore b/vendor/github.com/go-redis/redis/v8/.gitignore deleted file mode 100644 index b975a7b4c32..00000000000 --- a/vendor/github.com/go-redis/redis/v8/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.rdb -testdata/*/ -.idea/ diff --git a/vendor/github.com/go-redis/redis/v8/.golangci.yml b/vendor/github.com/go-redis/redis/v8/.golangci.yml deleted file mode 100644 index 2132eee96f2..00000000000 --- a/vendor/github.com/go-redis/redis/v8/.golangci.yml +++ /dev/null @@ -1,21 +0,0 @@ -run: - concurrency: 8 - deadline: 5m - tests: false -linters: - enable-all: true - disable: - - funlen - - gochecknoglobals - - gochecknoinits - - gocognit - - goconst - - godox - - gosec - - maligned - - wsl - - gomnd - - goerr113 - - exhaustive - - gofumpt - - nestif diff --git a/vendor/github.com/go-redis/redis/v8/.prettierrc b/vendor/github.com/go-redis/redis/v8/.prettierrc deleted file mode 100644 index 8b7f044ad1f..00000000000 --- a/vendor/github.com/go-redis/redis/v8/.prettierrc +++ /dev/null @@ -1,4 +0,0 @@ -semi: false -singleQuote: true -proseWrap: always -printWidth: 100 diff --git a/vendor/github.com/go-redis/redis/v8/.travis.yml b/vendor/github.com/go-redis/redis/v8/.travis.yml deleted file mode 100644 index adedd8df4e4..00000000000 --- a/vendor/github.com/go-redis/redis/v8/.travis.yml +++ /dev/null @@ -1,20 +0,0 @@ -dist: xenial -language: go - -services: - - redis-server - -go: - - 1.14.x - - 1.15.x - - tip - -matrix: - allow_failures: - - go: tip - -go_import_path: github.com/go-redis/redis - -before_install: - - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- - -b $(go env GOPATH)/bin v1.28.3 diff --git a/vendor/github.com/go-redis/redis/v8/CHANGELOG.md b/vendor/github.com/go-redis/redis/v8/CHANGELOG.md deleted file mode 100644 index d0c4c8c1cb3..00000000000 --- a/vendor/github.com/go-redis/redis/v8/CHANGELOG.md +++ /dev/null @@ -1,97 +0,0 @@ -# Changelog - -> :heart: [**Uptrace.dev** - distributed traces, logs, and errors in one place](https://uptrace.dev) - -## v8 - -- Documentation at https://redis.uptrace.dev/ - -- All commands require `context.Context` as a first argument, e.g. `rdb.Ping(ctx)`. If you are not - using `context.Context` yet, the simplest option is to define global package variable - `var ctx = context.TODO()` and use it when `ctx` is required. - -- Full support for `context.Context` canceling. - -- Added `redis.NewFailoverClusterClient` that supports routing read-only commands to a slave node. - -- Added `redisext.OpenTemetryHook` that adds - [Redis OpenTelemetry instrumentation](https://redis.uptrace.dev/tracing/). - -- Redis slow log support. - -- Ring uses Rendezvous Hashing by default which provides better distribution. You need to move - existing keys to a new location or keys will be inaccessible / lost. To use old hashing scheme: - -```go -import "github.com/golang/groupcache/consistenthash" - -ring := redis.NewRing(&redis.RingOptions{ - NewConsistentHash: func() { - return consistenthash.New(100, crc32.ChecksumIEEE) - }, -}) -``` - -- `ClusterOptions.MaxRedirects` default value is changed from 8 to 3. -- `Options.MaxRetries` default value is changed from 0 to 3. 
- -- `Cluster.ForEachNode` is renamed to `ForEachShard` for consistency with `Ring`. - -## v7.3 - -- New option `Options.Username` which causes client to use `AuthACL`. Be aware if your connection - URL contains username. - -## v7.2 - -- Existing `HMSet` is renamed to `HSet` and old deprecated `HMSet` is restored for Redis 3 users. - -## v7.1 - -- Existing `Cmd.String` is renamed to `Cmd.Text`. New `Cmd.String` implements `fmt.Stringer` - interface. - -## v7 - -- _Important_. Tx.Pipeline now returns a non-transactional pipeline. Use Tx.TxPipeline for a - transactional pipeline. -- WrapProcess is replaced with more convenient AddHook that has access to context.Context. -- WithContext now can not be used to create a shallow copy of the client. -- New methods ProcessContext, DoContext, and ExecContext. -- Client respects Context.Deadline when setting net.Conn deadline. -- Client listens on Context.Done while waiting for a connection from the pool and returns an error - when context context is cancelled. -- Add PubSub.ChannelWithSubscriptions that sends `*Subscription` in addition to `*Message` to allow - detecting reconnections. -- `time.Time` is now marshalled in RFC3339 format. `rdb.Get("foo").Time()` helper is added to parse - the time. -- `SetLimiter` is removed and added `Options.Limiter` instead. -- `HMSet` is deprecated as of Redis v4. - -## v6.15 - -- Cluster and Ring pipelines process commands for each node in its own goroutine. - -## 6.14 - -- Added Options.MinIdleConns. -- Added Options.MaxConnAge. -- PoolStats.FreeConns is renamed to PoolStats.IdleConns. -- Add Client.Do to simplify creating custom commands. -- Add Cmd.String, Cmd.Int, Cmd.Int64, Cmd.Uint64, Cmd.Float64, and Cmd.Bool helpers. -- Lower memory usage. - -## v6.13 - -- Ring got new options called `HashReplicas` and `Hash`. It is recommended to set - `HashReplicas = 1000` for better keys distribution between shards. -- Cluster client was optimized to use much less memory when reloading cluster state. -- PubSub.ReceiveMessage is re-worked to not use ReceiveTimeout so it does not lose data when timeout - occurres. In most cases it is recommended to use PubSub.Channel instead. -- Dialer.KeepAlive is set to 5 minutes by default. - -## v6.12 - -- ClusterClient got new option called `ClusterSlots` which allows to build cluster of normal Redis - Servers that don't have cluster mode enabled. See - https://godoc.org/github.com/go-redis/redis#example-NewClusterClient--ManualSetup diff --git a/vendor/github.com/go-redis/redis/v8/LICENSE b/vendor/github.com/go-redis/redis/v8/LICENSE deleted file mode 100644 index 298bed9beaf..00000000000 --- a/vendor/github.com/go-redis/redis/v8/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2013 The github.com/go-redis/redis Authors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/go-redis/redis/v8/Makefile b/vendor/github.com/go-redis/redis/v8/Makefile deleted file mode 100644 index 49e4c96f068..00000000000 --- a/vendor/github.com/go-redis/redis/v8/Makefile +++ /dev/null @@ -1,21 +0,0 @@ -all: testdeps - go test ./... - go test ./... -short -race - go test ./... -run=NONE -bench=. -benchmem - env GOOS=linux GOARCH=386 go test ./... - go vet - golangci-lint run - -testdeps: testdata/redis/src/redis-server - -bench: testdeps - go test ./... -test.run=NONE -test.bench=. -test.benchmem - -.PHONY: all test testdeps bench - -testdata/redis: - mkdir -p $@ - wget -qO- http://download.redis.io/redis-stable.tar.gz | tar xvz --strip-components=1 -C $@ - -testdata/redis/src/redis-server: testdata/redis - cd $< && make all diff --git a/vendor/github.com/go-redis/redis/v8/README.md b/vendor/github.com/go-redis/redis/v8/README.md deleted file mode 100644 index 28c1f0b2de1..00000000000 --- a/vendor/github.com/go-redis/redis/v8/README.md +++ /dev/null @@ -1,138 +0,0 @@ -# Redis client for Golang - -[![Build Status](https://travis-ci.org/go-redis/redis.png?branch=master)](https://travis-ci.org/go-redis/redis) -[![PkgGoDev](https://pkg.go.dev/badge/github.com/go-redis/redis/v8)](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc) -[![Documentation](https://img.shields.io/badge/redis-documentation-informational)](https://redis.uptrace.dev/) -[![Chat](https://discordapp.com/api/guilds/752070105847955518/widget.png)](https://discord.gg/rWtp5Aj) - -> :heart: [**Uptrace.dev** - distributed traces, logs, and errors in one place](https://uptrace.dev) - -- Join [Discord](https://discord.gg/rWtp5Aj) to ask questions. -- [Documentation](https://redis.uptrace.dev) -- [Reference](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc) -- [Examples](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#pkg-examples) -- [RealWorld example app](https://github.com/uptrace/go-treemux-realworld-example-app) - -## Ecosystem - -- [redisext](https://github.com/go-redis/redisext) - tracing using OpenTelemetry and OpenCensus. -- [Distributed Locks](https://github.com/bsm/redislock). -- [Redis Cache](https://github.com/go-redis/cache). -- [Rate limiting](https://github.com/go-redis/redis_rate). - -## Features - -- Redis 3 commands except QUIT, MONITOR, and SYNC. -- Automatic connection pooling with - [circuit breaker](https://en.wikipedia.org/wiki/Circuit_breaker_design_pattern) support. -- [Pub/Sub](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#PubSub). -- [Transactions](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client-TxPipeline). -- [Pipeline](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client-Pipeline) and - [TxPipeline](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client-TxPipeline). -- [Scripting](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#Script). -- [Timeouts](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#Options). 
-- [Redis Sentinel](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewFailoverClient). -- [Redis Cluster](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewClusterClient). -- [Cluster of Redis Servers](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-NewClusterClient--ManualSetup) - without using cluster mode and Redis Sentinel. -- [Ring](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewRing). -- [Instrumentation](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#ex-package--Instrumentation). - -## Installation - -go-redis supports 2 last Go versions and requires a Go version with -[modules](https://github.com/golang/go/wiki/Modules) support. So make sure to initialize a Go -module: - -```shell -go mod init github.com/my/repo -``` - -And then install go-redis (note _v8_ in the import; omitting it is a popular mistake): - -```shell -go get github.com/go-redis/redis/v8 -``` - -## Quickstart - -```go -import ( - "context" - "github.com/go-redis/redis/v8" -) - -var ctx = context.Background() - -func ExampleClient() { - rdb := redis.NewClient(&redis.Options{ - Addr: "localhost:6379", - Password: "", // no password set - DB: 0, // use default DB - }) - - err := rdb.Set(ctx, "key", "value", 0).Err() - if err != nil { - panic(err) - } - - val, err := rdb.Get(ctx, "key").Result() - if err != nil { - panic(err) - } - fmt.Println("key", val) - - val2, err := rdb.Get(ctx, "key2").Result() - if err == redis.Nil { - fmt.Println("key2 does not exist") - } else if err != nil { - panic(err) - } else { - fmt.Println("key2", val2) - } - // Output: key value - // key2 does not exist -} -``` - -## Look and feel - -Some corner cases: - -```go -// SET key value EX 10 NX -set, err := rdb.SetNX(ctx, "key", "value", 10*time.Second).Result() - -// SET key value keepttl NX -set, err := rdb.SetNX(ctx, "key", "value", redis.KeepTTL).Result() - -// SORT list LIMIT 0 2 ASC -vals, err := rdb.Sort(ctx, "list", &redis.Sort{Offset: 0, Count: 2, Order: "ASC"}).Result() - -// ZRANGEBYSCORE zset -inf +inf WITHSCORES LIMIT 0 2 -vals, err := rdb.ZRangeByScoreWithScores(ctx, "zset", &redis.ZRangeBy{ - Min: "-inf", - Max: "+inf", - Offset: 0, - Count: 2, -}).Result() - -// ZINTERSTORE out 2 zset1 zset2 WEIGHTS 2 3 AGGREGATE SUM -vals, err := rdb.ZInterStore(ctx, "out", &redis.ZStore{ - Keys: []string{"zset1", "zset2"}, - Weights: []int64{2, 3} -}).Result() - -// EVAL "return {KEYS[1],ARGV[1]}" 1 "key" "hello" -vals, err := rdb.Eval(ctx, "return {KEYS[1],ARGV[1]}", []string{"key"}, "hello").Result() - -// custom command -res, err := rdb.Do(ctx, "set", "key", "value").Result() -``` - -## See also - -- [Fast and flexible HTTP router](https://github.com/vmihailenco/treemux) -- [Golang PostgreSQL ORM](https://github.com/go-pg/pg) -- [Golang msgpack](https://github.com/vmihailenco/msgpack) -- [Golang message task queue](https://github.com/vmihailenco/taskq) diff --git a/vendor/github.com/go-redis/redis/v8/cluster.go b/vendor/github.com/go-redis/redis/v8/cluster.go deleted file mode 100644 index a6ce5c58462..00000000000 --- a/vendor/github.com/go-redis/redis/v8/cluster.go +++ /dev/null @@ -1,1697 +0,0 @@ -package redis - -import ( - "context" - "crypto/tls" - "fmt" - "math" - "net" - "runtime" - "sort" - "sync" - "sync/atomic" - "time" - - "github.com/go-redis/redis/v8/internal" - "github.com/go-redis/redis/v8/internal/hashtag" - "github.com/go-redis/redis/v8/internal/pool" - "github.com/go-redis/redis/v8/internal/proto" - "github.com/go-redis/redis/v8/internal/rand" -) - -var 
errClusterNoNodes = fmt.Errorf("redis: cluster has no nodes") - -// ClusterOptions are used to configure a cluster client and should be -// passed to NewClusterClient. -type ClusterOptions struct { - // A seed list of host:port addresses of cluster nodes. - Addrs []string - - // NewClient creates a cluster node client with provided name and options. - NewClient func(opt *Options) *Client - - // The maximum number of retries before giving up. Command is retried - // on network errors and MOVED/ASK redirects. - // Default is 3 retries. - MaxRedirects int - - // Enables read-only commands on slave nodes. - ReadOnly bool - // Allows routing read-only commands to the closest master or slave node. - // It automatically enables ReadOnly. - RouteByLatency bool - // Allows routing read-only commands to the random master or slave node. - // It automatically enables ReadOnly. - RouteRandomly bool - - // Optional function that returns cluster slots information. - // It is useful to manually create cluster of standalone Redis servers - // and load-balance read/write operations between master and slaves. - // It can use service like ZooKeeper to maintain configuration information - // and Cluster.ReloadState to manually trigger state reloading. - ClusterSlots func(context.Context) ([]ClusterSlot, error) - - // Following options are copied from Options struct. - - Dialer func(ctx context.Context, network, addr string) (net.Conn, error) - - OnConnect func(ctx context.Context, cn *Conn) error - - Username string - Password string - - MaxRetries int - MinRetryBackoff time.Duration - MaxRetryBackoff time.Duration - - DialTimeout time.Duration - ReadTimeout time.Duration - WriteTimeout time.Duration - - // PoolSize applies per cluster node and not for the whole cluster. 
- PoolSize int - MinIdleConns int - MaxConnAge time.Duration - PoolTimeout time.Duration - IdleTimeout time.Duration - IdleCheckFrequency time.Duration - - TLSConfig *tls.Config -} - -func (opt *ClusterOptions) init() { - if opt.MaxRedirects == -1 { - opt.MaxRedirects = 0 - } else if opt.MaxRedirects == 0 { - opt.MaxRedirects = 3 - } - - if (opt.RouteByLatency || opt.RouteRandomly) && opt.ClusterSlots == nil { - opt.ReadOnly = true - } - - if opt.PoolSize == 0 { - opt.PoolSize = 5 * runtime.NumCPU() - } - - switch opt.ReadTimeout { - case -1: - opt.ReadTimeout = 0 - case 0: - opt.ReadTimeout = 3 * time.Second - } - switch opt.WriteTimeout { - case -1: - opt.WriteTimeout = 0 - case 0: - opt.WriteTimeout = opt.ReadTimeout - } - - if opt.MaxRetries == 0 { - opt.MaxRetries = -1 - } - switch opt.MinRetryBackoff { - case -1: - opt.MinRetryBackoff = 0 - case 0: - opt.MinRetryBackoff = 8 * time.Millisecond - } - switch opt.MaxRetryBackoff { - case -1: - opt.MaxRetryBackoff = 0 - case 0: - opt.MaxRetryBackoff = 512 * time.Millisecond - } - - if opt.NewClient == nil { - opt.NewClient = NewClient - } -} - -func (opt *ClusterOptions) clientOptions() *Options { - const disableIdleCheck = -1 - - return &Options{ - Dialer: opt.Dialer, - OnConnect: opt.OnConnect, - - Username: opt.Username, - Password: opt.Password, - - MaxRetries: opt.MaxRetries, - MinRetryBackoff: opt.MinRetryBackoff, - MaxRetryBackoff: opt.MaxRetryBackoff, - - DialTimeout: opt.DialTimeout, - ReadTimeout: opt.ReadTimeout, - WriteTimeout: opt.WriteTimeout, - - PoolSize: opt.PoolSize, - MinIdleConns: opt.MinIdleConns, - MaxConnAge: opt.MaxConnAge, - PoolTimeout: opt.PoolTimeout, - IdleTimeout: opt.IdleTimeout, - IdleCheckFrequency: disableIdleCheck, - - readOnly: opt.ReadOnly, - - TLSConfig: opt.TLSConfig, - } -} - -//------------------------------------------------------------------------------ - -type clusterNode struct { - Client *Client - - latency uint32 // atomic - generation uint32 // atomic - failing uint32 // atomic -} - -func newClusterNode(clOpt *ClusterOptions, addr string) *clusterNode { - opt := clOpt.clientOptions() - opt.Addr = addr - node := clusterNode{ - Client: clOpt.NewClient(opt), - } - - node.latency = math.MaxUint32 - if clOpt.RouteByLatency { - go node.updateLatency() - } - - return &node -} - -func (n *clusterNode) String() string { - return n.Client.String() -} - -func (n *clusterNode) Close() error { - return n.Client.Close() -} - -func (n *clusterNode) updateLatency() { - const numProbe = 10 - var dur uint64 - - for i := 0; i < numProbe; i++ { - time.Sleep(time.Duration(10+rand.Intn(10)) * time.Millisecond) - - start := time.Now() - n.Client.Ping(context.TODO()) - dur += uint64(time.Since(start) / time.Microsecond) - } - - latency := float64(dur) / float64(numProbe) - atomic.StoreUint32(&n.latency, uint32(latency+0.5)) -} - -func (n *clusterNode) Latency() time.Duration { - latency := atomic.LoadUint32(&n.latency) - return time.Duration(latency) * time.Microsecond -} - -func (n *clusterNode) MarkAsFailing() { - atomic.StoreUint32(&n.failing, uint32(time.Now().Unix())) -} - -func (n *clusterNode) Failing() bool { - const timeout = 15 // 15 seconds - - failing := atomic.LoadUint32(&n.failing) - if failing == 0 { - return false - } - if time.Now().Unix()-int64(failing) < timeout { - return true - } - atomic.StoreUint32(&n.failing, 0) - return false -} - -func (n *clusterNode) Generation() uint32 { - return atomic.LoadUint32(&n.generation) -} - -func (n *clusterNode) SetGeneration(gen uint32) { - for { - v := 
atomic.LoadUint32(&n.generation) - if gen < v || atomic.CompareAndSwapUint32(&n.generation, v, gen) { - break - } - } -} - -//------------------------------------------------------------------------------ - -type clusterNodes struct { - opt *ClusterOptions - - mu sync.RWMutex - addrs []string - nodes map[string]*clusterNode - activeAddrs []string - closed bool - - _generation uint32 // atomic -} - -func newClusterNodes(opt *ClusterOptions) *clusterNodes { - return &clusterNodes{ - opt: opt, - - addrs: opt.Addrs, - nodes: make(map[string]*clusterNode), - } -} - -func (c *clusterNodes) Close() error { - c.mu.Lock() - defer c.mu.Unlock() - - if c.closed { - return nil - } - c.closed = true - - var firstErr error - for _, node := range c.nodes { - if err := node.Client.Close(); err != nil && firstErr == nil { - firstErr = err - } - } - - c.nodes = nil - c.activeAddrs = nil - - return firstErr -} - -func (c *clusterNodes) Addrs() ([]string, error) { - var addrs []string - c.mu.RLock() - closed := c.closed - if !closed { - if len(c.activeAddrs) > 0 { - addrs = c.activeAddrs - } else { - addrs = c.addrs - } - } - c.mu.RUnlock() - - if closed { - return nil, pool.ErrClosed - } - if len(addrs) == 0 { - return nil, errClusterNoNodes - } - return addrs, nil -} - -func (c *clusterNodes) NextGeneration() uint32 { - return atomic.AddUint32(&c._generation, 1) -} - -// GC removes unused nodes. -func (c *clusterNodes) GC(generation uint32) { - //nolint:prealloc - var collected []*clusterNode - - c.mu.Lock() - - c.activeAddrs = c.activeAddrs[:0] - for addr, node := range c.nodes { - if node.Generation() >= generation { - c.activeAddrs = append(c.activeAddrs, addr) - if c.opt.RouteByLatency { - go node.updateLatency() - } - continue - } - - delete(c.nodes, addr) - collected = append(collected, node) - } - - c.mu.Unlock() - - for _, node := range collected { - _ = node.Client.Close() - } -} - -func (c *clusterNodes) Get(addr string) (*clusterNode, error) { - node, err := c.get(addr) - if err != nil { - return nil, err - } - if node != nil { - return node, nil - } - - c.mu.Lock() - defer c.mu.Unlock() - - if c.closed { - return nil, pool.ErrClosed - } - - node, ok := c.nodes[addr] - if ok { - return node, nil - } - - node = newClusterNode(c.opt, addr) - - c.addrs = appendIfNotExists(c.addrs, addr) - c.nodes[addr] = node - - return node, nil -} - -func (c *clusterNodes) get(addr string) (*clusterNode, error) { - var node *clusterNode - var err error - c.mu.RLock() - if c.closed { - err = pool.ErrClosed - } else { - node = c.nodes[addr] - } - c.mu.RUnlock() - return node, err -} - -func (c *clusterNodes) All() ([]*clusterNode, error) { - c.mu.RLock() - defer c.mu.RUnlock() - - if c.closed { - return nil, pool.ErrClosed - } - - cp := make([]*clusterNode, 0, len(c.nodes)) - for _, node := range c.nodes { - cp = append(cp, node) - } - return cp, nil -} - -func (c *clusterNodes) Random() (*clusterNode, error) { - addrs, err := c.Addrs() - if err != nil { - return nil, err - } - - n := rand.Intn(len(addrs)) - return c.Get(addrs[n]) -} - -//------------------------------------------------------------------------------ - -type clusterSlot struct { - start, end int - nodes []*clusterNode -} - -type clusterSlotSlice []*clusterSlot - -func (p clusterSlotSlice) Len() int { - return len(p) -} - -func (p clusterSlotSlice) Less(i, j int) bool { - return p[i].start < p[j].start -} - -func (p clusterSlotSlice) Swap(i, j int) { - p[i], p[j] = p[j], p[i] -} - -type clusterState struct { - nodes *clusterNodes - Masters 
[]*clusterNode - Slaves []*clusterNode - - slots []*clusterSlot - - generation uint32 - createdAt time.Time -} - -func newClusterState( - nodes *clusterNodes, slots []ClusterSlot, origin string, -) (*clusterState, error) { - c := clusterState{ - nodes: nodes, - - slots: make([]*clusterSlot, 0, len(slots)), - - generation: nodes.NextGeneration(), - createdAt: time.Now(), - } - - originHost, _, _ := net.SplitHostPort(origin) - isLoopbackOrigin := isLoopback(originHost) - - for _, slot := range slots { - var nodes []*clusterNode - for i, slotNode := range slot.Nodes { - addr := slotNode.Addr - if !isLoopbackOrigin { - addr = replaceLoopbackHost(addr, originHost) - } - - node, err := c.nodes.Get(addr) - if err != nil { - return nil, err - } - - node.SetGeneration(c.generation) - nodes = append(nodes, node) - - if i == 0 { - c.Masters = appendUniqueNode(c.Masters, node) - } else { - c.Slaves = appendUniqueNode(c.Slaves, node) - } - } - - c.slots = append(c.slots, &clusterSlot{ - start: slot.Start, - end: slot.End, - nodes: nodes, - }) - } - - sort.Sort(clusterSlotSlice(c.slots)) - - time.AfterFunc(time.Minute, func() { - nodes.GC(c.generation) - }) - - return &c, nil -} - -func replaceLoopbackHost(nodeAddr, originHost string) string { - nodeHost, nodePort, err := net.SplitHostPort(nodeAddr) - if err != nil { - return nodeAddr - } - - nodeIP := net.ParseIP(nodeHost) - if nodeIP == nil { - return nodeAddr - } - - if !nodeIP.IsLoopback() { - return nodeAddr - } - - // Use origin host which is not loopback and node port. - return net.JoinHostPort(originHost, nodePort) -} - -func isLoopback(host string) bool { - ip := net.ParseIP(host) - if ip == nil { - return true - } - return ip.IsLoopback() -} - -func (c *clusterState) slotMasterNode(slot int) (*clusterNode, error) { - nodes := c.slotNodes(slot) - if len(nodes) > 0 { - return nodes[0], nil - } - return c.nodes.Random() -} - -func (c *clusterState) slotSlaveNode(slot int) (*clusterNode, error) { - nodes := c.slotNodes(slot) - switch len(nodes) { - case 0: - return c.nodes.Random() - case 1: - return nodes[0], nil - case 2: - if slave := nodes[1]; !slave.Failing() { - return slave, nil - } - return nodes[0], nil - default: - var slave *clusterNode - for i := 0; i < 10; i++ { - n := rand.Intn(len(nodes)-1) + 1 - slave = nodes[n] - if !slave.Failing() { - return slave, nil - } - } - - // All slaves are loading - use master. 
- return nodes[0], nil - } -} - -func (c *clusterState) slotClosestNode(slot int) (*clusterNode, error) { - nodes := c.slotNodes(slot) - if len(nodes) == 0 { - return c.nodes.Random() - } - - var node *clusterNode - for _, n := range nodes { - if n.Failing() { - continue - } - if node == nil || n.Latency() < node.Latency() { - node = n - } - } - if node != nil { - return node, nil - } - - // If all nodes are failing - return random node - return c.nodes.Random() -} - -func (c *clusterState) slotRandomNode(slot int) (*clusterNode, error) { - nodes := c.slotNodes(slot) - if len(nodes) == 0 { - return c.nodes.Random() - } - n := rand.Intn(len(nodes)) - return nodes[n], nil -} - -func (c *clusterState) slotNodes(slot int) []*clusterNode { - i := sort.Search(len(c.slots), func(i int) bool { - return c.slots[i].end >= slot - }) - if i >= len(c.slots) { - return nil - } - x := c.slots[i] - if slot >= x.start && slot <= x.end { - return x.nodes - } - return nil -} - -//------------------------------------------------------------------------------ - -type clusterStateHolder struct { - load func(ctx context.Context) (*clusterState, error) - - state atomic.Value - reloading uint32 // atomic -} - -func newClusterStateHolder(fn func(ctx context.Context) (*clusterState, error)) *clusterStateHolder { - return &clusterStateHolder{ - load: fn, - } -} - -func (c *clusterStateHolder) Reload(ctx context.Context) (*clusterState, error) { - state, err := c.load(ctx) - if err != nil { - return nil, err - } - c.state.Store(state) - return state, nil -} - -func (c *clusterStateHolder) LazyReload(ctx context.Context) { - if !atomic.CompareAndSwapUint32(&c.reloading, 0, 1) { - return - } - go func() { - defer atomic.StoreUint32(&c.reloading, 0) - - _, err := c.Reload(ctx) - if err != nil { - return - } - time.Sleep(200 * time.Millisecond) - }() -} - -func (c *clusterStateHolder) Get(ctx context.Context) (*clusterState, error) { - v := c.state.Load() - if v != nil { - state := v.(*clusterState) - if time.Since(state.createdAt) > 10*time.Second { - c.LazyReload(ctx) - } - return state, nil - } - return c.Reload(ctx) -} - -func (c *clusterStateHolder) ReloadOrGet(ctx context.Context) (*clusterState, error) { - state, err := c.Reload(ctx) - if err == nil { - return state, nil - } - return c.Get(ctx) -} - -//------------------------------------------------------------------------------ - -type clusterClient struct { - opt *ClusterOptions - nodes *clusterNodes - state *clusterStateHolder //nolint:structcheck - cmdsInfoCache *cmdsInfoCache //nolint:structcheck -} - -// ClusterClient is a Redis Cluster client representing a pool of zero -// or more underlying connections. It's safe for concurrent use by -// multiple goroutines. -type ClusterClient struct { - *clusterClient - cmdable - hooks - ctx context.Context -} - -// NewClusterClient returns a Redis Cluster client as described in -// http://redis.io/topics/cluster-spec. 
-func NewClusterClient(opt *ClusterOptions) *ClusterClient { - opt.init() - - c := &ClusterClient{ - clusterClient: &clusterClient{ - opt: opt, - nodes: newClusterNodes(opt), - }, - ctx: context.Background(), - } - c.state = newClusterStateHolder(c.loadState) - c.cmdsInfoCache = newCmdsInfoCache(c.cmdsInfo) - c.cmdable = c.Process - - if opt.IdleCheckFrequency > 0 { - go c.reaper(opt.IdleCheckFrequency) - } - - return c -} - -func (c *ClusterClient) Context() context.Context { - return c.ctx -} - -func (c *ClusterClient) WithContext(ctx context.Context) *ClusterClient { - if ctx == nil { - panic("nil context") - } - clone := *c - clone.cmdable = clone.Process - clone.hooks.lock() - clone.ctx = ctx - return &clone -} - -// Options returns read-only Options that were used to create the client. -func (c *ClusterClient) Options() *ClusterOptions { - return c.opt -} - -// ReloadState reloads cluster state. If available it calls ClusterSlots func -// to get cluster slots information. -func (c *ClusterClient) ReloadState(ctx context.Context) { - c.state.LazyReload(ctx) -} - -// Close closes the cluster client, releasing any open resources. -// -// It is rare to Close a ClusterClient, as the ClusterClient is meant -// to be long-lived and shared between many goroutines. -func (c *ClusterClient) Close() error { - return c.nodes.Close() -} - -// Do creates a Cmd from the args and processes the cmd. -func (c *ClusterClient) Do(ctx context.Context, args ...interface{}) *Cmd { - cmd := NewCmd(ctx, args...) - _ = c.Process(ctx, cmd) - return cmd -} - -func (c *ClusterClient) Process(ctx context.Context, cmd Cmder) error { - return c.hooks.process(ctx, cmd, c.process) -} - -func (c *ClusterClient) process(ctx context.Context, cmd Cmder) error { - cmdInfo := c.cmdInfo(cmd.Name()) - slot := c.cmdSlot(cmd) - - var node *clusterNode - var ask bool - var lastErr error - for attempt := 0; attempt <= c.opt.MaxRedirects; attempt++ { - if attempt > 0 { - if err := internal.Sleep(ctx, c.retryBackoff(attempt)); err != nil { - return err - } - } - - if node == nil { - var err error - node, err = c.cmdNode(ctx, cmdInfo, slot) - if err != nil { - return err - } - } - - if ask { - pipe := node.Client.Pipeline() - _ = pipe.Process(ctx, NewCmd(ctx, "asking")) - _ = pipe.Process(ctx, cmd) - _, lastErr = pipe.Exec(ctx) - _ = pipe.Close() - ask = false - } else { - lastErr = node.Client.Process(ctx, cmd) - } - - // If there is no error - we are done. - if lastErr == nil { - return nil - } - if isReadOnly := isReadOnlyError(lastErr); isReadOnly || lastErr == pool.ErrClosed { - if isReadOnly { - c.state.LazyReload(ctx) - } - node = nil - continue - } - - // If slave is loading - pick another node. - if c.opt.ReadOnly && isLoadingError(lastErr) { - node.MarkAsFailing() - node = nil - continue - } - - var moved bool - var addr string - moved, ask, addr = isMovedError(lastErr) - if moved || ask { - var err error - node, err = c.nodes.Get(addr) - if err != nil { - return err - } - continue - } - - if shouldRetry(lastErr, cmd.readTimeout() == nil) { - // First retry the same node. - if attempt == 0 { - continue - } - - // Second try another node. - node.MarkAsFailing() - node = nil - continue - } - - return lastErr - } - return lastErr -} - -// ForEachMaster concurrently calls the fn on each master node in the cluster. -// It returns the first error if any. 
-func (c *ClusterClient) ForEachMaster( - ctx context.Context, - fn func(ctx context.Context, client *Client) error, -) error { - state, err := c.state.ReloadOrGet(ctx) - if err != nil { - return err - } - - var wg sync.WaitGroup - errCh := make(chan error, 1) - - for _, master := range state.Masters { - wg.Add(1) - go func(node *clusterNode) { - defer wg.Done() - err := fn(ctx, node.Client) - if err != nil { - select { - case errCh <- err: - default: - } - } - }(master) - } - - wg.Wait() - - select { - case err := <-errCh: - return err - default: - return nil - } -} - -// ForEachSlave concurrently calls the fn on each slave node in the cluster. -// It returns the first error if any. -func (c *ClusterClient) ForEachSlave( - ctx context.Context, - fn func(ctx context.Context, client *Client) error, -) error { - state, err := c.state.ReloadOrGet(ctx) - if err != nil { - return err - } - - var wg sync.WaitGroup - errCh := make(chan error, 1) - - for _, slave := range state.Slaves { - wg.Add(1) - go func(node *clusterNode) { - defer wg.Done() - err := fn(ctx, node.Client) - if err != nil { - select { - case errCh <- err: - default: - } - } - }(slave) - } - - wg.Wait() - - select { - case err := <-errCh: - return err - default: - return nil - } -} - -// ForEachShard concurrently calls the fn on each known node in the cluster. -// It returns the first error if any. -func (c *ClusterClient) ForEachShard( - ctx context.Context, - fn func(ctx context.Context, client *Client) error, -) error { - state, err := c.state.ReloadOrGet(ctx) - if err != nil { - return err - } - - var wg sync.WaitGroup - errCh := make(chan error, 1) - - worker := func(node *clusterNode) { - defer wg.Done() - err := fn(ctx, node.Client) - if err != nil { - select { - case errCh <- err: - default: - } - } - } - - for _, node := range state.Masters { - wg.Add(1) - go worker(node) - } - for _, node := range state.Slaves { - wg.Add(1) - go worker(node) - } - - wg.Wait() - - select { - case err := <-errCh: - return err - default: - return nil - } -} - -// PoolStats returns accumulated connection pool stats. -func (c *ClusterClient) PoolStats() *PoolStats { - var acc PoolStats - - state, _ := c.state.Get(context.TODO()) - if state == nil { - return &acc - } - - for _, node := range state.Masters { - s := node.Client.connPool.Stats() - acc.Hits += s.Hits - acc.Misses += s.Misses - acc.Timeouts += s.Timeouts - - acc.TotalConns += s.TotalConns - acc.IdleConns += s.IdleConns - acc.StaleConns += s.StaleConns - } - - for _, node := range state.Slaves { - s := node.Client.connPool.Stats() - acc.Hits += s.Hits - acc.Misses += s.Misses - acc.Timeouts += s.Timeouts - - acc.TotalConns += s.TotalConns - acc.IdleConns += s.IdleConns - acc.StaleConns += s.StaleConns - } - - return &acc -} - -func (c *ClusterClient) loadState(ctx context.Context) (*clusterState, error) { - if c.opt.ClusterSlots != nil { - slots, err := c.opt.ClusterSlots(ctx) - if err != nil { - return nil, err - } - return newClusterState(c.nodes, slots, "") - } - - addrs, err := c.nodes.Addrs() - if err != nil { - return nil, err - } - - var firstErr error - - for _, idx := range rand.Perm(len(addrs)) { - addr := addrs[idx] - - node, err := c.nodes.Get(addr) - if err != nil { - if firstErr == nil { - firstErr = err - } - continue - } - - slots, err := node.Client.ClusterSlots(ctx).Result() - if err != nil { - if firstErr == nil { - firstErr = err - } - continue - } - - return newClusterState(c.nodes, slots, node.Client.opt.Addr) - } - - /* - * No node is connectable. 
It's possible that all nodes' IP has changed. - * Clear activeAddrs to let client be able to re-connect using the initial - * setting of the addresses (e.g. [redis-cluster-0:6379, redis-cluster-1:6379]), - * which might have chance to resolve domain name and get updated IP address. - */ - c.nodes.mu.Lock() - c.nodes.activeAddrs = nil - c.nodes.mu.Unlock() - - return nil, firstErr -} - -// reaper closes idle connections to the cluster. -func (c *ClusterClient) reaper(idleCheckFrequency time.Duration) { - ticker := time.NewTicker(idleCheckFrequency) - defer ticker.Stop() - - for range ticker.C { - nodes, err := c.nodes.All() - if err != nil { - break - } - - for _, node := range nodes { - _, err := node.Client.connPool.(*pool.ConnPool).ReapStaleConns() - if err != nil { - internal.Logger.Printf(c.Context(), "ReapStaleConns failed: %s", err) - } - } - } -} - -func (c *ClusterClient) Pipeline() Pipeliner { - pipe := Pipeline{ - ctx: c.ctx, - exec: c.processPipeline, - } - pipe.init() - return &pipe -} - -func (c *ClusterClient) Pipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error) { - return c.Pipeline().Pipelined(ctx, fn) -} - -func (c *ClusterClient) processPipeline(ctx context.Context, cmds []Cmder) error { - return c.hooks.processPipeline(ctx, cmds, c._processPipeline) -} - -func (c *ClusterClient) _processPipeline(ctx context.Context, cmds []Cmder) error { - cmdsMap := newCmdsMap() - err := c.mapCmdsByNode(ctx, cmdsMap, cmds) - if err != nil { - setCmdsErr(cmds, err) - return err - } - - for attempt := 0; attempt <= c.opt.MaxRedirects; attempt++ { - if attempt > 0 { - if err := internal.Sleep(ctx, c.retryBackoff(attempt)); err != nil { - setCmdsErr(cmds, err) - return err - } - } - - failedCmds := newCmdsMap() - var wg sync.WaitGroup - - for node, cmds := range cmdsMap.m { - wg.Add(1) - go func(node *clusterNode, cmds []Cmder) { - defer wg.Done() - - err := c._processPipelineNode(ctx, node, cmds, failedCmds) - if err == nil { - return - } - if attempt < c.opt.MaxRedirects { - if err := c.mapCmdsByNode(ctx, failedCmds, cmds); err != nil { - setCmdsErr(cmds, err) - } - } else { - setCmdsErr(cmds, err) - } - }(node, cmds) - } - - wg.Wait() - if len(failedCmds.m) == 0 { - break - } - cmdsMap = failedCmds - } - - return cmdsFirstErr(cmds) -} - -func (c *ClusterClient) mapCmdsByNode(ctx context.Context, cmdsMap *cmdsMap, cmds []Cmder) error { - state, err := c.state.Get(ctx) - if err != nil { - return err - } - - if c.opt.ReadOnly && c.cmdsAreReadOnly(cmds) { - for _, cmd := range cmds { - slot := c.cmdSlot(cmd) - node, err := c.slotReadOnlyNode(state, slot) - if err != nil { - return err - } - cmdsMap.Add(node, cmd) - } - return nil - } - - for _, cmd := range cmds { - slot := c.cmdSlot(cmd) - node, err := state.slotMasterNode(slot) - if err != nil { - return err - } - cmdsMap.Add(node, cmd) - } - return nil -} - -func (c *ClusterClient) cmdsAreReadOnly(cmds []Cmder) bool { - for _, cmd := range cmds { - cmdInfo := c.cmdInfo(cmd.Name()) - if cmdInfo == nil || !cmdInfo.ReadOnly { - return false - } - } - return true -} - -func (c *ClusterClient) _processPipelineNode( - ctx context.Context, node *clusterNode, cmds []Cmder, failedCmds *cmdsMap, -) error { - return node.Client.hooks.processPipeline(ctx, cmds, func(ctx context.Context, cmds []Cmder) error { - return node.Client.withConn(ctx, func(ctx context.Context, cn *pool.Conn) error { - err := cn.WithWriter(ctx, c.opt.WriteTimeout, func(wr *proto.Writer) error { - return writeCmds(wr, cmds) - }) - if err != nil { - return 
err - } - - return cn.WithReader(ctx, c.opt.ReadTimeout, func(rd *proto.Reader) error { - return c.pipelineReadCmds(ctx, node, rd, cmds, failedCmds) - }) - }) - }) -} - -func (c *ClusterClient) pipelineReadCmds( - ctx context.Context, - node *clusterNode, - rd *proto.Reader, - cmds []Cmder, - failedCmds *cmdsMap, -) error { - for _, cmd := range cmds { - err := cmd.readReply(rd) - cmd.SetErr(err) - - if err == nil { - continue - } - - if c.checkMovedErr(ctx, cmd, err, failedCmds) { - continue - } - - if c.opt.ReadOnly && isLoadingError(err) { - node.MarkAsFailing() - return err - } - if isRedisError(err) { - continue - } - return err - } - return nil -} - -func (c *ClusterClient) checkMovedErr( - ctx context.Context, cmd Cmder, err error, failedCmds *cmdsMap, -) bool { - moved, ask, addr := isMovedError(err) - if !moved && !ask { - return false - } - - node, err := c.nodes.Get(addr) - if err != nil { - return false - } - - if moved { - c.state.LazyReload(ctx) - failedCmds.Add(node, cmd) - return true - } - - if ask { - failedCmds.Add(node, NewCmd(ctx, "asking"), cmd) - return true - } - - panic("not reached") -} - -// TxPipeline acts like Pipeline, but wraps queued commands with MULTI/EXEC. -func (c *ClusterClient) TxPipeline() Pipeliner { - pipe := Pipeline{ - ctx: c.ctx, - exec: c.processTxPipeline, - } - pipe.init() - return &pipe -} - -func (c *ClusterClient) TxPipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error) { - return c.TxPipeline().Pipelined(ctx, fn) -} - -func (c *ClusterClient) processTxPipeline(ctx context.Context, cmds []Cmder) error { - return c.hooks.processPipeline(ctx, cmds, c._processTxPipeline) -} - -func (c *ClusterClient) _processTxPipeline(ctx context.Context, cmds []Cmder) error { - state, err := c.state.Get(ctx) - if err != nil { - setCmdsErr(cmds, err) - return err - } - - cmdsMap := c.mapCmdsBySlot(cmds) - for slot, cmds := range cmdsMap { - node, err := state.slotMasterNode(slot) - if err != nil { - setCmdsErr(cmds, err) - continue - } - - cmdsMap := map[*clusterNode][]Cmder{node: cmds} - for attempt := 0; attempt <= c.opt.MaxRedirects; attempt++ { - if attempt > 0 { - if err := internal.Sleep(ctx, c.retryBackoff(attempt)); err != nil { - setCmdsErr(cmds, err) - return err - } - } - - failedCmds := newCmdsMap() - var wg sync.WaitGroup - - for node, cmds := range cmdsMap { - wg.Add(1) - go func(node *clusterNode, cmds []Cmder) { - defer wg.Done() - - err := c._processTxPipelineNode(ctx, node, cmds, failedCmds) - if err == nil { - return - } - if attempt < c.opt.MaxRedirects { - if err := c.mapCmdsByNode(ctx, failedCmds, cmds); err != nil { - setCmdsErr(cmds, err) - } - } else { - setCmdsErr(cmds, err) - } - }(node, cmds) - } - - wg.Wait() - if len(failedCmds.m) == 0 { - break - } - cmdsMap = failedCmds.m - } - } - - return cmdsFirstErr(cmds) -} - -func (c *ClusterClient) mapCmdsBySlot(cmds []Cmder) map[int][]Cmder { - cmdsMap := make(map[int][]Cmder) - for _, cmd := range cmds { - slot := c.cmdSlot(cmd) - cmdsMap[slot] = append(cmdsMap[slot], cmd) - } - return cmdsMap -} - -func (c *ClusterClient) _processTxPipelineNode( - ctx context.Context, node *clusterNode, cmds []Cmder, failedCmds *cmdsMap, -) error { - return node.Client.hooks.processTxPipeline(ctx, cmds, func(ctx context.Context, cmds []Cmder) error { - return node.Client.withConn(ctx, func(ctx context.Context, cn *pool.Conn) error { - err := cn.WithWriter(ctx, c.opt.WriteTimeout, func(wr *proto.Writer) error { - return writeCmds(wr, cmds) - }) - if err != nil { - return err - } - - 
return cn.WithReader(ctx, c.opt.ReadTimeout, func(rd *proto.Reader) error { - statusCmd := cmds[0].(*StatusCmd) - // Trim multi and exec. - cmds = cmds[1 : len(cmds)-1] - - err := c.txPipelineReadQueued(ctx, rd, statusCmd, cmds, failedCmds) - if err != nil { - moved, ask, addr := isMovedError(err) - if moved || ask { - return c.cmdsMoved(ctx, cmds, moved, ask, addr, failedCmds) - } - return err - } - - return pipelineReadCmds(rd, cmds) - }) - }) - }) -} - -func (c *ClusterClient) txPipelineReadQueued( - ctx context.Context, - rd *proto.Reader, - statusCmd *StatusCmd, - cmds []Cmder, - failedCmds *cmdsMap, -) error { - // Parse queued replies. - if err := statusCmd.readReply(rd); err != nil { - return err - } - - for _, cmd := range cmds { - err := statusCmd.readReply(rd) - if err == nil || c.checkMovedErr(ctx, cmd, err, failedCmds) || isRedisError(err) { - continue - } - return err - } - - // Parse number of replies. - line, err := rd.ReadLine() - if err != nil { - if err == Nil { - err = TxFailedErr - } - return err - } - - switch line[0] { - case proto.ErrorReply: - return proto.ParseErrorReply(line) - case proto.ArrayReply: - // ok - default: - return fmt.Errorf("redis: expected '*', but got line %q", line) - } - - return nil -} - -func (c *ClusterClient) cmdsMoved( - ctx context.Context, cmds []Cmder, - moved, ask bool, - addr string, - failedCmds *cmdsMap, -) error { - node, err := c.nodes.Get(addr) - if err != nil { - return err - } - - if moved { - c.state.LazyReload(ctx) - for _, cmd := range cmds { - failedCmds.Add(node, cmd) - } - return nil - } - - if ask { - for _, cmd := range cmds { - failedCmds.Add(node, NewCmd(ctx, "asking"), cmd) - } - return nil - } - - return nil -} - -func (c *ClusterClient) Watch(ctx context.Context, fn func(*Tx) error, keys ...string) error { - if len(keys) == 0 { - return fmt.Errorf("redis: Watch requires at least one key") - } - - slot := hashtag.Slot(keys[0]) - for _, key := range keys[1:] { - if hashtag.Slot(key) != slot { - err := fmt.Errorf("redis: Watch requires all keys to be in the same slot") - return err - } - } - - node, err := c.slotMasterNode(ctx, slot) - if err != nil { - return err - } - - for attempt := 0; attempt <= c.opt.MaxRedirects; attempt++ { - if attempt > 0 { - if err := internal.Sleep(ctx, c.retryBackoff(attempt)); err != nil { - return err - } - } - - err = node.Client.Watch(ctx, fn, keys...) 
- if err == nil { - break - } - - moved, ask, addr := isMovedError(err) - if moved || ask { - node, err = c.nodes.Get(addr) - if err != nil { - return err - } - continue - } - - if isReadOnly := isReadOnlyError(err); isReadOnly || err == pool.ErrClosed { - if isReadOnly { - c.state.LazyReload(ctx) - } - node, err = c.slotMasterNode(ctx, slot) - if err != nil { - return err - } - continue - } - - if shouldRetry(err, true) { - continue - } - - return err - } - - return err -} - -func (c *ClusterClient) pubSub() *PubSub { - var node *clusterNode - pubsub := &PubSub{ - opt: c.opt.clientOptions(), - - newConn: func(ctx context.Context, channels []string) (*pool.Conn, error) { - if node != nil { - panic("node != nil") - } - - var err error - if len(channels) > 0 { - slot := hashtag.Slot(channels[0]) - node, err = c.slotMasterNode(ctx, slot) - } else { - node, err = c.nodes.Random() - } - if err != nil { - return nil, err - } - - cn, err := node.Client.newConn(context.TODO()) - if err != nil { - node = nil - - return nil, err - } - - return cn, nil - }, - closeConn: func(cn *pool.Conn) error { - err := node.Client.connPool.CloseConn(cn) - node = nil - return err - }, - } - pubsub.init() - - return pubsub -} - -// Subscribe subscribes the client to the specified channels. -// Channels can be omitted to create empty subscription. -func (c *ClusterClient) Subscribe(ctx context.Context, channels ...string) *PubSub { - pubsub := c.pubSub() - if len(channels) > 0 { - _ = pubsub.Subscribe(ctx, channels...) - } - return pubsub -} - -// PSubscribe subscribes the client to the given patterns. -// Patterns can be omitted to create empty subscription. -func (c *ClusterClient) PSubscribe(ctx context.Context, channels ...string) *PubSub { - pubsub := c.pubSub() - if len(channels) > 0 { - _ = pubsub.PSubscribe(ctx, channels...) - } - return pubsub -} - -func (c *ClusterClient) retryBackoff(attempt int) time.Duration { - return internal.RetryBackoff(attempt, c.opt.MinRetryBackoff, c.opt.MaxRetryBackoff) -} - -func (c *ClusterClient) cmdsInfo(ctx context.Context) (map[string]*CommandInfo, error) { - // Try 3 random nodes. 
- const nodeLimit = 3 - - addrs, err := c.nodes.Addrs() - if err != nil { - return nil, err - } - - var firstErr error - - perm := rand.Perm(len(addrs)) - if len(perm) > nodeLimit { - perm = perm[:nodeLimit] - } - - for _, idx := range perm { - addr := addrs[idx] - - node, err := c.nodes.Get(addr) - if err != nil { - if firstErr == nil { - firstErr = err - } - continue - } - - info, err := node.Client.Command(ctx).Result() - if err == nil { - return info, nil - } - if firstErr == nil { - firstErr = err - } - } - - if firstErr == nil { - panic("not reached") - } - return nil, firstErr -} - -func (c *ClusterClient) cmdInfo(name string) *CommandInfo { - cmdsInfo, err := c.cmdsInfoCache.Get(c.ctx) - if err != nil { - return nil - } - - info := cmdsInfo[name] - if info == nil { - internal.Logger.Printf(c.Context(), "info for cmd=%s not found", name) - } - return info -} - -func (c *ClusterClient) cmdSlot(cmd Cmder) int { - args := cmd.Args() - if args[0] == "cluster" && args[1] == "getkeysinslot" { - return args[2].(int) - } - - cmdInfo := c.cmdInfo(cmd.Name()) - return cmdSlot(cmd, cmdFirstKeyPos(cmd, cmdInfo)) -} - -func cmdSlot(cmd Cmder, pos int) int { - if pos == 0 { - return hashtag.RandomSlot() - } - firstKey := cmd.stringArg(pos) - return hashtag.Slot(firstKey) -} - -func (c *ClusterClient) cmdNode( - ctx context.Context, - cmdInfo *CommandInfo, - slot int, -) (*clusterNode, error) { - state, err := c.state.Get(ctx) - if err != nil { - return nil, err - } - - if (c.opt.RouteByLatency || c.opt.RouteRandomly) && cmdInfo != nil && cmdInfo.ReadOnly { - return c.slotReadOnlyNode(state, slot) - } - return state.slotMasterNode(slot) -} - -func (c *clusterClient) slotReadOnlyNode(state *clusterState, slot int) (*clusterNode, error) { - if c.opt.RouteByLatency { - return state.slotClosestNode(slot) - } - if c.opt.RouteRandomly { - return state.slotRandomNode(slot) - } - return state.slotSlaveNode(slot) -} - -func (c *ClusterClient) slotMasterNode(ctx context.Context, slot int) (*clusterNode, error) { - state, err := c.state.Get(ctx) - if err != nil { - return nil, err - } - return state.slotMasterNode(slot) -} - -func appendUniqueNode(nodes []*clusterNode, node *clusterNode) []*clusterNode { - for _, n := range nodes { - if n == node { - return nodes - } - } - return append(nodes, node) -} - -func appendIfNotExists(ss []string, es ...string) []string { -loop: - for _, e := range es { - for _, s := range ss { - if s == e { - continue loop - } - } - ss = append(ss, e) - } - return ss -} - -//------------------------------------------------------------------------------ - -type cmdsMap struct { - mu sync.Mutex - m map[*clusterNode][]Cmder -} - -func newCmdsMap() *cmdsMap { - return &cmdsMap{ - m: make(map[*clusterNode][]Cmder), - } -} - -func (m *cmdsMap) Add(node *clusterNode, cmds ...Cmder) { - m.mu.Lock() - m.m[node] = append(m.m[node], cmds...) 
- m.mu.Unlock() -} diff --git a/vendor/github.com/go-redis/redis/v8/cluster_commands.go b/vendor/github.com/go-redis/redis/v8/cluster_commands.go deleted file mode 100644 index 1f0bae067ae..00000000000 --- a/vendor/github.com/go-redis/redis/v8/cluster_commands.go +++ /dev/null @@ -1,25 +0,0 @@ -package redis - -import ( - "context" - "sync/atomic" -) - -func (c *ClusterClient) DBSize(ctx context.Context) *IntCmd { - cmd := NewIntCmd(ctx, "dbsize") - var size int64 - err := c.ForEachMaster(ctx, func(ctx context.Context, master *Client) error { - n, err := master.DBSize(ctx).Result() - if err != nil { - return err - } - atomic.AddInt64(&size, n) - return nil - }) - if err != nil { - cmd.SetErr(err) - return cmd - } - cmd.val = size - return cmd -} diff --git a/vendor/github.com/go-redis/redis/v8/command.go b/vendor/github.com/go-redis/redis/v8/command.go deleted file mode 100644 index 5dd55332503..00000000000 --- a/vendor/github.com/go-redis/redis/v8/command.go +++ /dev/null @@ -1,2396 +0,0 @@ -package redis - -import ( - "context" - "fmt" - "net" - "strconv" - "time" - - "github.com/go-redis/redis/v8/internal" - "github.com/go-redis/redis/v8/internal/proto" - "github.com/go-redis/redis/v8/internal/util" -) - -type Cmder interface { - Name() string - FullName() string - Args() []interface{} - String() string - stringArg(int) string - firstKeyPos() int8 - setFirstKeyPos(int8) - - readTimeout() *time.Duration - readReply(rd *proto.Reader) error - - SetErr(error) - Err() error -} - -func setCmdsErr(cmds []Cmder, e error) { - for _, cmd := range cmds { - if cmd.Err() == nil { - cmd.SetErr(e) - } - } -} - -func cmdsFirstErr(cmds []Cmder) error { - for _, cmd := range cmds { - if err := cmd.Err(); err != nil { - return err - } - } - return nil -} - -func writeCmds(wr *proto.Writer, cmds []Cmder) error { - for _, cmd := range cmds { - if err := writeCmd(wr, cmd); err != nil { - return err - } - } - return nil -} - -func writeCmd(wr *proto.Writer, cmd Cmder) error { - return wr.WriteArgs(cmd.Args()) -} - -func cmdFirstKeyPos(cmd Cmder, info *CommandInfo) int { - if pos := cmd.firstKeyPos(); pos != 0 { - return int(pos) - } - - switch cmd.Name() { - case "eval", "evalsha": - if cmd.stringArg(2) != "0" { - return 3 - } - - return 0 - case "publish": - return 1 - case "memory": - // https://github.com/redis/redis/issues/7493 - if cmd.stringArg(1) == "usage" { - return 2 - } - } - - if info != nil { - return int(info.FirstKeyPos) - } - return 0 -} - -func cmdString(cmd Cmder, val interface{}) string { - b := make([]byte, 0, 64) - - for i, arg := range cmd.Args() { - if i > 0 { - b = append(b, ' ') - } - b = internal.AppendArg(b, arg) - } - - if err := cmd.Err(); err != nil { - b = append(b, ": "...) - b = append(b, err.Error()...) - } else if val != nil { - b = append(b, ": "...) - b = internal.AppendArg(b, val) - } - - return internal.String(b) -} - -//------------------------------------------------------------------------------ - -type baseCmd struct { - ctx context.Context - args []interface{} - err error - keyPos int8 - - _readTimeout *time.Duration -} - -var _ Cmder = (*Cmd)(nil) - -func (cmd *baseCmd) Name() string { - if len(cmd.args) == 0 { - return "" - } - // Cmd name must be lower cased. 
- return internal.ToLower(cmd.stringArg(0)) -} - -func (cmd *baseCmd) FullName() string { - switch name := cmd.Name(); name { - case "cluster", "command": - if len(cmd.args) == 1 { - return name - } - if s2, ok := cmd.args[1].(string); ok { - return name + " " + s2 - } - return name - default: - return name - } -} - -func (cmd *baseCmd) Args() []interface{} { - return cmd.args -} - -func (cmd *baseCmd) stringArg(pos int) string { - if pos < 0 || pos >= len(cmd.args) { - return "" - } - s, _ := cmd.args[pos].(string) - return s -} - -func (cmd *baseCmd) firstKeyPos() int8 { - return cmd.keyPos -} - -func (cmd *baseCmd) setFirstKeyPos(keyPos int8) { - cmd.keyPos = keyPos -} - -func (cmd *baseCmd) SetErr(e error) { - cmd.err = e -} - -func (cmd *baseCmd) Err() error { - return cmd.err -} - -func (cmd *baseCmd) readTimeout() *time.Duration { - return cmd._readTimeout -} - -func (cmd *baseCmd) setReadTimeout(d time.Duration) { - cmd._readTimeout = &d -} - -//------------------------------------------------------------------------------ - -type Cmd struct { - baseCmd - - val interface{} -} - -func NewCmd(ctx context.Context, args ...interface{}) *Cmd { - return &Cmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *Cmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *Cmd) Val() interface{} { - return cmd.val -} - -func (cmd *Cmd) Result() (interface{}, error) { - return cmd.val, cmd.err -} - -func (cmd *Cmd) Text() (string, error) { - if cmd.err != nil { - return "", cmd.err - } - switch val := cmd.val.(type) { - case string: - return val, nil - default: - err := fmt.Errorf("redis: unexpected type=%T for String", val) - return "", err - } -} - -func (cmd *Cmd) Int() (int, error) { - if cmd.err != nil { - return 0, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return int(val), nil - case string: - return strconv.Atoi(val) - default: - err := fmt.Errorf("redis: unexpected type=%T for Int", val) - return 0, err - } -} - -func (cmd *Cmd) Int64() (int64, error) { - if cmd.err != nil { - return 0, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return val, nil - case string: - return strconv.ParseInt(val, 10, 64) - default: - err := fmt.Errorf("redis: unexpected type=%T for Int64", val) - return 0, err - } -} - -func (cmd *Cmd) Uint64() (uint64, error) { - if cmd.err != nil { - return 0, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return uint64(val), nil - case string: - return strconv.ParseUint(val, 10, 64) - default: - err := fmt.Errorf("redis: unexpected type=%T for Uint64", val) - return 0, err - } -} - -func (cmd *Cmd) Float32() (float32, error) { - if cmd.err != nil { - return 0, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return float32(val), nil - case string: - f, err := strconv.ParseFloat(val, 32) - if err != nil { - return 0, err - } - return float32(f), nil - default: - err := fmt.Errorf("redis: unexpected type=%T for Float32", val) - return 0, err - } -} - -func (cmd *Cmd) Float64() (float64, error) { - if cmd.err != nil { - return 0, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return float64(val), nil - case string: - return strconv.ParseFloat(val, 64) - default: - err := fmt.Errorf("redis: unexpected type=%T for Float64", val) - return 0, err - } -} - -func (cmd *Cmd) Bool() (bool, error) { - if cmd.err != nil { - return false, cmd.err - } - switch val := cmd.val.(type) { - case int64: - return val != 0, nil - case string: - return strconv.ParseBool(val) - default: - 
err := fmt.Errorf("redis: unexpected type=%T for Bool", val) - return false, err - } -} - -func (cmd *Cmd) readReply(rd *proto.Reader) (err error) { - cmd.val, err = rd.ReadReply(sliceParser) - return err -} - -// sliceParser implements proto.MultiBulkParse. -func sliceParser(rd *proto.Reader, n int64) (interface{}, error) { - vals := make([]interface{}, n) - for i := 0; i < len(vals); i++ { - v, err := rd.ReadReply(sliceParser) - if err != nil { - if err == Nil { - vals[i] = nil - continue - } - if err, ok := err.(proto.RedisError); ok { - vals[i] = err - continue - } - return nil, err - } - vals[i] = v - } - return vals, nil -} - -//------------------------------------------------------------------------------ - -type SliceCmd struct { - baseCmd - - val []interface{} -} - -var _ Cmder = (*SliceCmd)(nil) - -func NewSliceCmd(ctx context.Context, args ...interface{}) *SliceCmd { - return &SliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *SliceCmd) Val() []interface{} { - return cmd.val -} - -func (cmd *SliceCmd) Result() ([]interface{}, error) { - return cmd.val, cmd.err -} - -func (cmd *SliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *SliceCmd) readReply(rd *proto.Reader) error { - v, err := rd.ReadArrayReply(sliceParser) - if err != nil { - return err - } - cmd.val = v.([]interface{}) - return nil -} - -//------------------------------------------------------------------------------ - -type StatusCmd struct { - baseCmd - - val string -} - -var _ Cmder = (*StatusCmd)(nil) - -func NewStatusCmd(ctx context.Context, args ...interface{}) *StatusCmd { - return &StatusCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StatusCmd) Val() string { - return cmd.val -} - -func (cmd *StatusCmd) Result() (string, error) { - return cmd.val, cmd.err -} - -func (cmd *StatusCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StatusCmd) readReply(rd *proto.Reader) (err error) { - cmd.val, err = rd.ReadString() - return err -} - -//------------------------------------------------------------------------------ - -type IntCmd struct { - baseCmd - - val int64 -} - -var _ Cmder = (*IntCmd)(nil) - -func NewIntCmd(ctx context.Context, args ...interface{}) *IntCmd { - return &IntCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *IntCmd) Val() int64 { - return cmd.val -} - -func (cmd *IntCmd) Result() (int64, error) { - return cmd.val, cmd.err -} - -func (cmd *IntCmd) Uint64() (uint64, error) { - return uint64(cmd.val), cmd.err -} - -func (cmd *IntCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *IntCmd) readReply(rd *proto.Reader) (err error) { - cmd.val, err = rd.ReadIntReply() - return err -} - -//------------------------------------------------------------------------------ - -type IntSliceCmd struct { - baseCmd - - val []int64 -} - -var _ Cmder = (*IntSliceCmd)(nil) - -func NewIntSliceCmd(ctx context.Context, args ...interface{}) *IntSliceCmd { - return &IntSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *IntSliceCmd) Val() []int64 { - return cmd.val -} - -func (cmd *IntSliceCmd) Result() ([]int64, error) { - return cmd.val, cmd.err -} - -func (cmd *IntSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *IntSliceCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]int64, n) - for i := 0; i < len(cmd.val); i++ { 
- num, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.val[i] = num - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type DurationCmd struct { - baseCmd - - val time.Duration - precision time.Duration -} - -var _ Cmder = (*DurationCmd)(nil) - -func NewDurationCmd(ctx context.Context, precision time.Duration, args ...interface{}) *DurationCmd { - return &DurationCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - precision: precision, - } -} - -func (cmd *DurationCmd) Val() time.Duration { - return cmd.val -} - -func (cmd *DurationCmd) Result() (time.Duration, error) { - return cmd.val, cmd.err -} - -func (cmd *DurationCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *DurationCmd) readReply(rd *proto.Reader) error { - n, err := rd.ReadIntReply() - if err != nil { - return err - } - switch n { - // -2 if the key does not exist - // -1 if the key exists but has no associated expire - case -2, -1: - cmd.val = time.Duration(n) - default: - cmd.val = time.Duration(n) * cmd.precision - } - return nil -} - -//------------------------------------------------------------------------------ - -type TimeCmd struct { - baseCmd - - val time.Time -} - -var _ Cmder = (*TimeCmd)(nil) - -func NewTimeCmd(ctx context.Context, args ...interface{}) *TimeCmd { - return &TimeCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *TimeCmd) Val() time.Time { - return cmd.val -} - -func (cmd *TimeCmd) Result() (time.Time, error) { - return cmd.val, cmd.err -} - -func (cmd *TimeCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *TimeCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 2 { - return nil, fmt.Errorf("got %d elements, expected 2", n) - } - - sec, err := rd.ReadInt() - if err != nil { - return nil, err - } - - microsec, err := rd.ReadInt() - if err != nil { - return nil, err - } - - cmd.val = time.Unix(sec, microsec*1000) - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type BoolCmd struct { - baseCmd - - val bool -} - -var _ Cmder = (*BoolCmd)(nil) - -func NewBoolCmd(ctx context.Context, args ...interface{}) *BoolCmd { - return &BoolCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *BoolCmd) Val() bool { - return cmd.val -} - -func (cmd *BoolCmd) Result() (bool, error) { - return cmd.val, cmd.err -} - -func (cmd *BoolCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *BoolCmd) readReply(rd *proto.Reader) error { - v, err := rd.ReadReply(nil) - // `SET key value NX` returns nil when key already exists. But - // `SETNX key value` returns bool (0/1). So convert nil to bool. 
- if err == Nil { - cmd.val = false - return nil - } - if err != nil { - return err - } - switch v := v.(type) { - case int64: - cmd.val = v == 1 - return nil - case string: - cmd.val = v == "OK" - return nil - default: - return fmt.Errorf("got %T, wanted int64 or string", v) - } -} - -//------------------------------------------------------------------------------ - -type StringCmd struct { - baseCmd - - val string -} - -var _ Cmder = (*StringCmd)(nil) - -func NewStringCmd(ctx context.Context, args ...interface{}) *StringCmd { - return &StringCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StringCmd) Val() string { - return cmd.val -} - -func (cmd *StringCmd) Result() (string, error) { - return cmd.Val(), cmd.err -} - -func (cmd *StringCmd) Bytes() ([]byte, error) { - return util.StringToBytes(cmd.val), cmd.err -} - -func (cmd *StringCmd) Int() (int, error) { - if cmd.err != nil { - return 0, cmd.err - } - return strconv.Atoi(cmd.Val()) -} - -func (cmd *StringCmd) Int64() (int64, error) { - if cmd.err != nil { - return 0, cmd.err - } - return strconv.ParseInt(cmd.Val(), 10, 64) -} - -func (cmd *StringCmd) Uint64() (uint64, error) { - if cmd.err != nil { - return 0, cmd.err - } - return strconv.ParseUint(cmd.Val(), 10, 64) -} - -func (cmd *StringCmd) Float32() (float32, error) { - if cmd.err != nil { - return 0, cmd.err - } - f, err := strconv.ParseFloat(cmd.Val(), 32) - if err != nil { - return 0, err - } - return float32(f), nil -} - -func (cmd *StringCmd) Float64() (float64, error) { - if cmd.err != nil { - return 0, cmd.err - } - return strconv.ParseFloat(cmd.Val(), 64) -} - -func (cmd *StringCmd) Time() (time.Time, error) { - if cmd.err != nil { - return time.Time{}, cmd.err - } - return time.Parse(time.RFC3339Nano, cmd.Val()) -} - -func (cmd *StringCmd) Scan(val interface{}) error { - if cmd.err != nil { - return cmd.err - } - return proto.Scan([]byte(cmd.val), val) -} - -func (cmd *StringCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StringCmd) readReply(rd *proto.Reader) (err error) { - cmd.val, err = rd.ReadString() - return err -} - -//------------------------------------------------------------------------------ - -type FloatCmd struct { - baseCmd - - val float64 -} - -var _ Cmder = (*FloatCmd)(nil) - -func NewFloatCmd(ctx context.Context, args ...interface{}) *FloatCmd { - return &FloatCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *FloatCmd) Val() float64 { - return cmd.val -} - -func (cmd *FloatCmd) Result() (float64, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *FloatCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *FloatCmd) readReply(rd *proto.Reader) (err error) { - cmd.val, err = rd.ReadFloatReply() - return err -} - -//------------------------------------------------------------------------------ - -type StringSliceCmd struct { - baseCmd - - val []string -} - -var _ Cmder = (*StringSliceCmd)(nil) - -func NewStringSliceCmd(ctx context.Context, args ...interface{}) *StringSliceCmd { - return &StringSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StringSliceCmd) Val() []string { - return cmd.val -} - -func (cmd *StringSliceCmd) Result() ([]string, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *StringSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StringSliceCmd) ScanSlice(container interface{}) error { - return proto.ScanSlice(cmd.Val(), container) -} - -func (cmd *StringSliceCmd) 
readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]string, n) - for i := 0; i < len(cmd.val); i++ { - switch s, err := rd.ReadString(); { - case err == Nil: - cmd.val[i] = "" - case err != nil: - return nil, err - default: - cmd.val[i] = s - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type BoolSliceCmd struct { - baseCmd - - val []bool -} - -var _ Cmder = (*BoolSliceCmd)(nil) - -func NewBoolSliceCmd(ctx context.Context, args ...interface{}) *BoolSliceCmd { - return &BoolSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *BoolSliceCmd) Val() []bool { - return cmd.val -} - -func (cmd *BoolSliceCmd) Result() ([]bool, error) { - return cmd.val, cmd.err -} - -func (cmd *BoolSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *BoolSliceCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]bool, n) - for i := 0; i < len(cmd.val); i++ { - n, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.val[i] = n == 1 - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type StringStringMapCmd struct { - baseCmd - - val map[string]string -} - -var _ Cmder = (*StringStringMapCmd)(nil) - -func NewStringStringMapCmd(ctx context.Context, args ...interface{}) *StringStringMapCmd { - return &StringStringMapCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StringStringMapCmd) Val() map[string]string { - return cmd.val -} - -func (cmd *StringStringMapCmd) Result() (map[string]string, error) { - return cmd.val, cmd.err -} - -func (cmd *StringStringMapCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StringStringMapCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make(map[string]string, n/2) - for i := int64(0); i < n; i += 2 { - key, err := rd.ReadString() - if err != nil { - return nil, err - } - - value, err := rd.ReadString() - if err != nil { - return nil, err - } - - cmd.val[key] = value - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type StringIntMapCmd struct { - baseCmd - - val map[string]int64 -} - -var _ Cmder = (*StringIntMapCmd)(nil) - -func NewStringIntMapCmd(ctx context.Context, args ...interface{}) *StringIntMapCmd { - return &StringIntMapCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StringIntMapCmd) Val() map[string]int64 { - return cmd.val -} - -func (cmd *StringIntMapCmd) Result() (map[string]int64, error) { - return cmd.val, cmd.err -} - -func (cmd *StringIntMapCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StringIntMapCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make(map[string]int64, n/2) - for i := int64(0); i < n; i += 2 { - key, err := rd.ReadString() - if err != nil { - return nil, err - } - - n, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - - cmd.val[key] = n - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type 
StringStructMapCmd struct { - baseCmd - - val map[string]struct{} -} - -var _ Cmder = (*StringStructMapCmd)(nil) - -func NewStringStructMapCmd(ctx context.Context, args ...interface{}) *StringStructMapCmd { - return &StringStructMapCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *StringStructMapCmd) Val() map[string]struct{} { - return cmd.val -} - -func (cmd *StringStructMapCmd) Result() (map[string]struct{}, error) { - return cmd.val, cmd.err -} - -func (cmd *StringStructMapCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *StringStructMapCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make(map[string]struct{}, n) - for i := int64(0); i < n; i++ { - key, err := rd.ReadString() - if err != nil { - return nil, err - } - cmd.val[key] = struct{}{} - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type XMessage struct { - ID string - Values map[string]interface{} -} - -type XMessageSliceCmd struct { - baseCmd - - val []XMessage -} - -var _ Cmder = (*XMessageSliceCmd)(nil) - -func NewXMessageSliceCmd(ctx context.Context, args ...interface{}) *XMessageSliceCmd { - return &XMessageSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *XMessageSliceCmd) Val() []XMessage { - return cmd.val -} - -func (cmd *XMessageSliceCmd) Result() ([]XMessage, error) { - return cmd.val, cmd.err -} - -func (cmd *XMessageSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XMessageSliceCmd) readReply(rd *proto.Reader) error { - var err error - cmd.val, err = readXMessageSlice(rd) - return err -} - -func readXMessageSlice(rd *proto.Reader) ([]XMessage, error) { - n, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - - msgs := make([]XMessage, n) - for i := 0; i < n; i++ { - var err error - msgs[i], err = readXMessage(rd) - if err != nil { - return nil, err - } - } - return msgs, nil -} - -func readXMessage(rd *proto.Reader) (XMessage, error) { - n, err := rd.ReadArrayLen() - if err != nil { - return XMessage{}, err - } - if n != 2 { - return XMessage{}, fmt.Errorf("got %d, wanted 2", n) - } - - id, err := rd.ReadString() - if err != nil { - return XMessage{}, err - } - - var values map[string]interface{} - - v, err := rd.ReadArrayReply(stringInterfaceMapParser) - if err != nil { - if err != proto.Nil { - return XMessage{}, err - } - } else { - values = v.(map[string]interface{}) - } - - return XMessage{ - ID: id, - Values: values, - }, nil -} - -// stringInterfaceMapParser implements proto.MultiBulkParse. 
-func stringInterfaceMapParser(rd *proto.Reader, n int64) (interface{}, error) { - m := make(map[string]interface{}, n/2) - for i := int64(0); i < n; i += 2 { - key, err := rd.ReadString() - if err != nil { - return nil, err - } - - value, err := rd.ReadString() - if err != nil { - return nil, err - } - - m[key] = value - } - return m, nil -} - -//------------------------------------------------------------------------------ - -type XStream struct { - Stream string - Messages []XMessage -} - -type XStreamSliceCmd struct { - baseCmd - - val []XStream -} - -var _ Cmder = (*XStreamSliceCmd)(nil) - -func NewXStreamSliceCmd(ctx context.Context, args ...interface{}) *XStreamSliceCmd { - return &XStreamSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *XStreamSliceCmd) Val() []XStream { - return cmd.val -} - -func (cmd *XStreamSliceCmd) Result() ([]XStream, error) { - return cmd.val, cmd.err -} - -func (cmd *XStreamSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XStreamSliceCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]XStream, n) - for i := 0; i < len(cmd.val); i++ { - i := i - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 2 { - return nil, fmt.Errorf("got %d, wanted 2", n) - } - - stream, err := rd.ReadString() - if err != nil { - return nil, err - } - - msgs, err := readXMessageSlice(rd) - if err != nil { - return nil, err - } - - cmd.val[i] = XStream{ - Stream: stream, - Messages: msgs, - } - return nil, nil - }) - if err != nil { - return nil, err - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type XPending struct { - Count int64 - Lower string - Higher string - Consumers map[string]int64 -} - -type XPendingCmd struct { - baseCmd - val *XPending -} - -var _ Cmder = (*XPendingCmd)(nil) - -func NewXPendingCmd(ctx context.Context, args ...interface{}) *XPendingCmd { - return &XPendingCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *XPendingCmd) Val() *XPending { - return cmd.val -} - -func (cmd *XPendingCmd) Result() (*XPending, error) { - return cmd.val, cmd.err -} - -func (cmd *XPendingCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XPendingCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 4 { - return nil, fmt.Errorf("got %d, wanted 4", n) - } - - count, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - - lower, err := rd.ReadString() - if err != nil && err != Nil { - return nil, err - } - - higher, err := rd.ReadString() - if err != nil && err != Nil { - return nil, err - } - - cmd.val = &XPending{ - Count: count, - Lower: lower, - Higher: higher, - } - _, err = rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - for i := int64(0); i < n; i++ { - _, err = rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 2 { - return nil, fmt.Errorf("got %d, wanted 2", n) - } - - consumerName, err := rd.ReadString() - if err != nil { - return nil, err - } - - consumerPending, err := rd.ReadInt() - if err != nil { - return nil, err - } - - if cmd.val.Consumers == nil { - cmd.val.Consumers = make(map[string]int64) - } - cmd.val.Consumers[consumerName] = consumerPending - - return nil, nil - }) - if err != nil { - 
return nil, err - } - } - return nil, nil - }) - if err != nil && err != Nil { - return nil, err - } - - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type XPendingExt struct { - ID string - Consumer string - Idle time.Duration - RetryCount int64 -} - -type XPendingExtCmd struct { - baseCmd - val []XPendingExt -} - -var _ Cmder = (*XPendingExtCmd)(nil) - -func NewXPendingExtCmd(ctx context.Context, args ...interface{}) *XPendingExtCmd { - return &XPendingExtCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *XPendingExtCmd) Val() []XPendingExt { - return cmd.val -} - -func (cmd *XPendingExtCmd) Result() ([]XPendingExt, error) { - return cmd.val, cmd.err -} - -func (cmd *XPendingExtCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XPendingExtCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]XPendingExt, 0, n) - for i := int64(0); i < n; i++ { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 4 { - return nil, fmt.Errorf("got %d, wanted 4", n) - } - - id, err := rd.ReadString() - if err != nil { - return nil, err - } - - consumer, err := rd.ReadString() - if err != nil && err != Nil { - return nil, err - } - - idle, err := rd.ReadIntReply() - if err != nil && err != Nil { - return nil, err - } - - retryCount, err := rd.ReadIntReply() - if err != nil && err != Nil { - return nil, err - } - - cmd.val = append(cmd.val, XPendingExt{ - ID: id, - Consumer: consumer, - Idle: time.Duration(idle) * time.Millisecond, - RetryCount: retryCount, - }) - return nil, nil - }) - if err != nil { - return nil, err - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type XInfoGroupsCmd struct { - baseCmd - val []XInfoGroup -} - -type XInfoGroup struct { - Name string - Consumers int64 - Pending int64 - LastDeliveredID string -} - -var _ Cmder = (*XInfoGroupsCmd)(nil) - -func NewXInfoGroupsCmd(ctx context.Context, stream string) *XInfoGroupsCmd { - return &XInfoGroupsCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: []interface{}{"xinfo", "groups", stream}, - }, - } -} - -func (cmd *XInfoGroupsCmd) Val() []XInfoGroup { - return cmd.val -} - -func (cmd *XInfoGroupsCmd) Result() ([]XInfoGroup, error) { - return cmd.val, cmd.err -} - -func (cmd *XInfoGroupsCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XInfoGroupsCmd) readReply(rd *proto.Reader) error { - n, err := rd.ReadArrayLen() - if err != nil { - return err - } - - cmd.val = make([]XInfoGroup, n) - - for i := 0; i < n; i++ { - cmd.val[i], err = readXGroupInfo(rd) - if err != nil { - return err - } - } - - return nil -} - -func readXGroupInfo(rd *proto.Reader) (XInfoGroup, error) { - var group XInfoGroup - - n, err := rd.ReadArrayLen() - if err != nil { - return group, err - } - if n != 8 { - return group, fmt.Errorf("redis: got %d elements in XINFO GROUPS reply, wanted 8", n) - } - - for i := 0; i < 4; i++ { - key, err := rd.ReadString() - if err != nil { - return group, err - } - - val, err := rd.ReadString() - if err != nil { - return group, err - } - - switch key { - case "name": - group.Name = val - case "consumers": - group.Consumers, err = strconv.ParseInt(val, 0, 64) - if err != nil { - return group, err - } - case "pending": - group.Pending, err = strconv.ParseInt(val, 0, 64) - if err != 
nil { - return group, err - } - case "last-delivered-id": - group.LastDeliveredID = val - default: - return group, fmt.Errorf("redis: unexpected content %s in XINFO GROUPS reply", key) - } - } - - return group, nil -} - -//------------------------------------------------------------------------------ - -type XInfoStreamCmd struct { - baseCmd - val *XInfoStream -} - -type XInfoStream struct { - Length int64 - RadixTreeKeys int64 - RadixTreeNodes int64 - Groups int64 - LastGeneratedID string - FirstEntry XMessage - LastEntry XMessage -} - -var _ Cmder = (*XInfoStreamCmd)(nil) - -func NewXInfoStreamCmd(ctx context.Context, stream string) *XInfoStreamCmd { - return &XInfoStreamCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: []interface{}{"xinfo", "stream", stream}, - }, - } -} - -func (cmd *XInfoStreamCmd) Val() *XInfoStream { - return cmd.val -} - -func (cmd *XInfoStreamCmd) Result() (*XInfoStream, error) { - return cmd.val, cmd.err -} - -func (cmd *XInfoStreamCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *XInfoStreamCmd) readReply(rd *proto.Reader) error { - v, err := rd.ReadReply(xStreamInfoParser) - if err != nil { - return err - } - cmd.val = v.(*XInfoStream) - return nil -} - -func xStreamInfoParser(rd *proto.Reader, n int64) (interface{}, error) { - if n != 14 { - return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM reply,"+ - "wanted 14", n) - } - var info XInfoStream - for i := 0; i < 7; i++ { - key, err := rd.ReadString() - if err != nil { - return nil, err - } - switch key { - case "length": - info.Length, err = rd.ReadIntReply() - case "radix-tree-keys": - info.RadixTreeKeys, err = rd.ReadIntReply() - case "radix-tree-nodes": - info.RadixTreeNodes, err = rd.ReadIntReply() - case "groups": - info.Groups, err = rd.ReadIntReply() - case "last-generated-id": - info.LastGeneratedID, err = rd.ReadString() - case "first-entry": - info.FirstEntry, err = readXMessage(rd) - case "last-entry": - info.LastEntry, err = readXMessage(rd) - default: - return nil, fmt.Errorf("redis: unexpected content %s "+ - "in XINFO STREAM reply", key) - } - if err != nil { - return nil, err - } - } - return &info, nil -} - -//------------------------------------------------------------------------------ - -type ZSliceCmd struct { - baseCmd - - val []Z -} - -var _ Cmder = (*ZSliceCmd)(nil) - -func NewZSliceCmd(ctx context.Context, args ...interface{}) *ZSliceCmd { - return &ZSliceCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *ZSliceCmd) Val() []Z { - return cmd.val -} - -func (cmd *ZSliceCmd) Result() ([]Z, error) { - return cmd.val, cmd.err -} - -func (cmd *ZSliceCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *ZSliceCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]Z, n/2) - for i := 0; i < len(cmd.val); i++ { - member, err := rd.ReadString() - if err != nil { - return nil, err - } - - score, err := rd.ReadFloatReply() - if err != nil { - return nil, err - } - - cmd.val[i] = Z{ - Member: member, - Score: score, - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type ZWithKeyCmd struct { - baseCmd - - val *ZWithKey -} - -var _ Cmder = (*ZWithKeyCmd)(nil) - -func NewZWithKeyCmd(ctx context.Context, args ...interface{}) *ZWithKeyCmd { - return &ZWithKeyCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *ZWithKeyCmd) Val() 
*ZWithKey { - return cmd.val -} - -func (cmd *ZWithKeyCmd) Result() (*ZWithKey, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *ZWithKeyCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *ZWithKeyCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - if n != 3 { - return nil, fmt.Errorf("got %d elements, expected 3", n) - } - - cmd.val = &ZWithKey{} - var err error - - cmd.val.Key, err = rd.ReadString() - if err != nil { - return nil, err - } - - cmd.val.Member, err = rd.ReadString() - if err != nil { - return nil, err - } - - cmd.val.Score, err = rd.ReadFloatReply() - if err != nil { - return nil, err - } - - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type ScanCmd struct { - baseCmd - - page []string - cursor uint64 - - process cmdable -} - -var _ Cmder = (*ScanCmd)(nil) - -func NewScanCmd(ctx context.Context, process cmdable, args ...interface{}) *ScanCmd { - return &ScanCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - process: process, - } -} - -func (cmd *ScanCmd) Val() (keys []string, cursor uint64) { - return cmd.page, cmd.cursor -} - -func (cmd *ScanCmd) Result() (keys []string, cursor uint64, err error) { - return cmd.page, cmd.cursor, cmd.err -} - -func (cmd *ScanCmd) String() string { - return cmdString(cmd, cmd.page) -} - -func (cmd *ScanCmd) readReply(rd *proto.Reader) (err error) { - cmd.page, cmd.cursor, err = rd.ReadScanReply() - return err -} - -// Iterator creates a new ScanIterator. -func (cmd *ScanCmd) Iterator() *ScanIterator { - return &ScanIterator{ - cmd: cmd, - } -} - -//------------------------------------------------------------------------------ - -type ClusterNode struct { - ID string - Addr string -} - -type ClusterSlot struct { - Start int - End int - Nodes []ClusterNode -} - -type ClusterSlotsCmd struct { - baseCmd - - val []ClusterSlot -} - -var _ Cmder = (*ClusterSlotsCmd)(nil) - -func NewClusterSlotsCmd(ctx context.Context, args ...interface{}) *ClusterSlotsCmd { - return &ClusterSlotsCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *ClusterSlotsCmd) Val() []ClusterSlot { - return cmd.val -} - -func (cmd *ClusterSlotsCmd) Result() ([]ClusterSlot, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *ClusterSlotsCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *ClusterSlotsCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]ClusterSlot, n) - for i := 0; i < len(cmd.val); i++ { - n, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - if n < 2 { - err := fmt.Errorf("redis: got %d elements in cluster info, expected at least 2", n) - return nil, err - } - - start, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - - end, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - - nodes := make([]ClusterNode, n-2) - for j := 0; j < len(nodes); j++ { - n, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - if n != 2 && n != 3 { - err := fmt.Errorf("got %d elements in cluster info address, expected 2 or 3", n) - return nil, err - } - - ip, err := rd.ReadString() - if err != nil { - return nil, err - } - - port, err := rd.ReadString() - if err != nil { - return nil, err - } - - nodes[j].Addr = net.JoinHostPort(ip, port) - - if n == 3 { - id, err := rd.ReadString() - if err != nil 
{ - return nil, err - } - nodes[j].ID = id - } - } - - cmd.val[i] = ClusterSlot{ - Start: int(start), - End: int(end), - Nodes: nodes, - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -// GeoLocation is used with GeoAdd to add geospatial location. -type GeoLocation struct { - Name string - Longitude, Latitude, Dist float64 - GeoHash int64 -} - -// GeoRadiusQuery is used with GeoRadius to query geospatial index. -type GeoRadiusQuery struct { - Radius float64 - // Can be m, km, ft, or mi. Default is km. - Unit string - WithCoord bool - WithDist bool - WithGeoHash bool - Count int - // Can be ASC or DESC. Default is no sort order. - Sort string - Store string - StoreDist string -} - -type GeoLocationCmd struct { - baseCmd - - q *GeoRadiusQuery - locations []GeoLocation -} - -var _ Cmder = (*GeoLocationCmd)(nil) - -func NewGeoLocationCmd(ctx context.Context, q *GeoRadiusQuery, args ...interface{}) *GeoLocationCmd { - return &GeoLocationCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: geoLocationArgs(q, args...), - }, - q: q, - } -} - -func geoLocationArgs(q *GeoRadiusQuery, args ...interface{}) []interface{} { - args = append(args, q.Radius) - if q.Unit != "" { - args = append(args, q.Unit) - } else { - args = append(args, "km") - } - if q.WithCoord { - args = append(args, "withcoord") - } - if q.WithDist { - args = append(args, "withdist") - } - if q.WithGeoHash { - args = append(args, "withhash") - } - if q.Count > 0 { - args = append(args, "count", q.Count) - } - if q.Sort != "" { - args = append(args, q.Sort) - } - if q.Store != "" { - args = append(args, "store") - args = append(args, q.Store) - } - if q.StoreDist != "" { - args = append(args, "storedist") - args = append(args, q.StoreDist) - } - return args -} - -func (cmd *GeoLocationCmd) Val() []GeoLocation { - return cmd.locations -} - -func (cmd *GeoLocationCmd) Result() ([]GeoLocation, error) { - return cmd.locations, cmd.err -} - -func (cmd *GeoLocationCmd) String() string { - return cmdString(cmd, cmd.locations) -} - -func (cmd *GeoLocationCmd) readReply(rd *proto.Reader) error { - v, err := rd.ReadArrayReply(newGeoLocationSliceParser(cmd.q)) - if err != nil { - return err - } - cmd.locations = v.([]GeoLocation) - return nil -} - -func newGeoLocationSliceParser(q *GeoRadiusQuery) proto.MultiBulkParse { - return func(rd *proto.Reader, n int64) (interface{}, error) { - locs := make([]GeoLocation, 0, n) - for i := int64(0); i < n; i++ { - v, err := rd.ReadReply(newGeoLocationParser(q)) - if err != nil { - return nil, err - } - switch vv := v.(type) { - case string: - locs = append(locs, GeoLocation{ - Name: vv, - }) - case *GeoLocation: - // TODO: avoid copying - locs = append(locs, *vv) - default: - return nil, fmt.Errorf("got %T, expected string or *GeoLocation", v) - } - } - return locs, nil - } -} - -func newGeoLocationParser(q *GeoRadiusQuery) proto.MultiBulkParse { - return func(rd *proto.Reader, n int64) (interface{}, error) { - var loc GeoLocation - var err error - - loc.Name, err = rd.ReadString() - if err != nil { - return nil, err - } - if q.WithDist { - loc.Dist, err = rd.ReadFloatReply() - if err != nil { - return nil, err - } - } - if q.WithGeoHash { - loc.GeoHash, err = rd.ReadIntReply() - if err != nil { - return nil, err - } - } - if q.WithCoord { - n, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - if n != 2 { - return nil, fmt.Errorf("got %d coordinates, expected 2", n) - } - - loc.Longitude, err = rd.ReadFloatReply() 
- if err != nil { - return nil, err - } - loc.Latitude, err = rd.ReadFloatReply() - if err != nil { - return nil, err - } - } - - return &loc, nil - } -} - -//------------------------------------------------------------------------------ - -type GeoPos struct { - Longitude, Latitude float64 -} - -type GeoPosCmd struct { - baseCmd - - val []*GeoPos -} - -var _ Cmder = (*GeoPosCmd)(nil) - -func NewGeoPosCmd(ctx context.Context, args ...interface{}) *GeoPosCmd { - return &GeoPosCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *GeoPosCmd) Val() []*GeoPos { - return cmd.val -} - -func (cmd *GeoPosCmd) Result() ([]*GeoPos, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *GeoPosCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *GeoPosCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]*GeoPos, n) - for i := 0; i < len(cmd.val); i++ { - i := i - _, err := rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) { - longitude, err := rd.ReadFloatReply() - if err != nil { - return nil, err - } - - latitude, err := rd.ReadFloatReply() - if err != nil { - return nil, err - } - - cmd.val[i] = &GeoPos{ - Longitude: longitude, - Latitude: latitude, - } - return nil, nil - }) - if err != nil { - if err == Nil { - cmd.val[i] = nil - continue - } - return nil, err - } - } - return nil, nil - }) - return err -} - -//------------------------------------------------------------------------------ - -type CommandInfo struct { - Name string - Arity int8 - Flags []string - ACLFlags []string - FirstKeyPos int8 - LastKeyPos int8 - StepCount int8 - ReadOnly bool -} - -type CommandsInfoCmd struct { - baseCmd - - val map[string]*CommandInfo -} - -var _ Cmder = (*CommandsInfoCmd)(nil) - -func NewCommandsInfoCmd(ctx context.Context, args ...interface{}) *CommandsInfoCmd { - return &CommandsInfoCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *CommandsInfoCmd) Val() map[string]*CommandInfo { - return cmd.val -} - -func (cmd *CommandsInfoCmd) Result() (map[string]*CommandInfo, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *CommandsInfoCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *CommandsInfoCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make(map[string]*CommandInfo, n) - for i := int64(0); i < n; i++ { - v, err := rd.ReadReply(commandInfoParser) - if err != nil { - return nil, err - } - vv := v.(*CommandInfo) - cmd.val[vv.Name] = vv - } - return nil, nil - }) - return err -} - -func commandInfoParser(rd *proto.Reader, n int64) (interface{}, error) { - const numArgRedis5 = 6 - const numArgRedis6 = 7 - - switch n { - case numArgRedis5, numArgRedis6: - // continue - default: - return nil, fmt.Errorf("redis: got %d elements in COMMAND reply, wanted 7", n) - } - - var cmd CommandInfo - var err error - - cmd.Name, err = rd.ReadString() - if err != nil { - return nil, err - } - - arity, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.Arity = int8(arity) - - _, err = rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.Flags = make([]string, n) - for i := 0; i < len(cmd.Flags); i++ { - switch s, err := rd.ReadString(); { - case err == Nil: - cmd.Flags[i] = "" - case err != nil: - return nil, err - default: - cmd.Flags[i] = s - } - } - return nil, nil - }) - if err != nil { - 
return nil, err - } - - firstKeyPos, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.FirstKeyPos = int8(firstKeyPos) - - lastKeyPos, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.LastKeyPos = int8(lastKeyPos) - - stepCount, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - cmd.StepCount = int8(stepCount) - - for _, flag := range cmd.Flags { - if flag == "readonly" { - cmd.ReadOnly = true - break - } - } - - if n == numArgRedis5 { - return &cmd, nil - } - - _, err = rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.ACLFlags = make([]string, n) - for i := 0; i < len(cmd.ACLFlags); i++ { - switch s, err := rd.ReadString(); { - case err == Nil: - cmd.ACLFlags[i] = "" - case err != nil: - return nil, err - default: - cmd.ACLFlags[i] = s - } - } - return nil, nil - }) - if err != nil { - return nil, err - } - - return &cmd, nil -} - -//------------------------------------------------------------------------------ - -type cmdsInfoCache struct { - fn func(ctx context.Context) (map[string]*CommandInfo, error) - - once internal.Once - cmds map[string]*CommandInfo -} - -func newCmdsInfoCache(fn func(ctx context.Context) (map[string]*CommandInfo, error)) *cmdsInfoCache { - return &cmdsInfoCache{ - fn: fn, - } -} - -func (c *cmdsInfoCache) Get(ctx context.Context) (map[string]*CommandInfo, error) { - err := c.once.Do(func() error { - cmds, err := c.fn(ctx) - if err != nil { - return err - } - - // Extensions have cmd names in upper case. Convert them to lower case. - for k, v := range cmds { - lower := internal.ToLower(k) - if lower != k { - cmds[lower] = v - } - } - - c.cmds = cmds - return nil - }) - return c.cmds, err -} - -//------------------------------------------------------------------------------ - -type SlowLog struct { - ID int64 - Time time.Time - Duration time.Duration - Args []string - // These are also optional fields emitted only by Redis 4.0 or greater: - // https://redis.io/commands/slowlog#output-format - ClientAddr string - ClientName string -} - -type SlowLogCmd struct { - baseCmd - - val []SlowLog -} - -var _ Cmder = (*SlowLogCmd)(nil) - -func NewSlowLogCmd(ctx context.Context, args ...interface{}) *SlowLogCmd { - return &SlowLogCmd{ - baseCmd: baseCmd{ - ctx: ctx, - args: args, - }, - } -} - -func (cmd *SlowLogCmd) Val() []SlowLog { - return cmd.val -} - -func (cmd *SlowLogCmd) Result() ([]SlowLog, error) { - return cmd.Val(), cmd.Err() -} - -func (cmd *SlowLogCmd) String() string { - return cmdString(cmd, cmd.val) -} - -func (cmd *SlowLogCmd) readReply(rd *proto.Reader) error { - _, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) { - cmd.val = make([]SlowLog, n) - for i := 0; i < len(cmd.val); i++ { - n, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - if n < 4 { - err := fmt.Errorf("redis: got %d elements in slowlog get, expected at least 4", n) - return nil, err - } - - id, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - - createdAt, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - createdAtTime := time.Unix(createdAt, 0) - - costs, err := rd.ReadIntReply() - if err != nil { - return nil, err - } - costsDuration := time.Duration(costs) * time.Microsecond - - cmdLen, err := rd.ReadArrayLen() - if err != nil { - return nil, err - } - if cmdLen < 1 { - err := fmt.Errorf("redis: got %d elements commands reply in slowlog get, expected at least 1", cmdLen) - return nil, err - } - - cmdString := make([]string, 
cmdLen) - for i := 0; i < cmdLen; i++ { - cmdString[i], err = rd.ReadString() - if err != nil { - return nil, err - } - } - - var address, name string - for i := 4; i < n; i++ { - str, err := rd.ReadString() - if err != nil { - return nil, err - } - if i == 4 { - address = str - } else if i == 5 { - name = str - } - } - - cmd.val[i] = SlowLog{ - ID: id, - Time: createdAtTime, - Duration: costsDuration, - Args: cmdString, - ClientAddr: address, - ClientName: name, - } - } - return nil, nil - }) - return err -} diff --git a/vendor/github.com/go-redis/redis/v8/commands.go b/vendor/github.com/go-redis/redis/v8/commands.go deleted file mode 100644 index f4e6afc1475..00000000000 --- a/vendor/github.com/go-redis/redis/v8/commands.go +++ /dev/null @@ -1,2732 +0,0 @@ -package redis - -import ( - "context" - "errors" - "io" - "time" - - "github.com/go-redis/redis/v8/internal" -) - -// KeepTTL is an option for Set command to keep key's existing TTL. -// For example: -// -// rdb.Set(ctx, key, value, redis.KeepTTL) -const KeepTTL = -1 - -func usePrecise(dur time.Duration) bool { - return dur < time.Second || dur%time.Second != 0 -} - -func formatMs(ctx context.Context, dur time.Duration) int64 { - if dur > 0 && dur < time.Millisecond { - internal.Logger.Printf( - ctx, - "specified duration is %s, but minimal supported value is %s - truncating to 1ms", - dur, time.Millisecond, - ) - return 1 - } - return int64(dur / time.Millisecond) -} - -func formatSec(ctx context.Context, dur time.Duration) int64 { - if dur > 0 && dur < time.Second { - internal.Logger.Printf( - ctx, - "specified duration is %s, but minimal supported value is %s - truncating to 1s", - dur, time.Second, - ) - return 1 - } - return int64(dur / time.Second) -} - -func appendArgs(dst, src []interface{}) []interface{} { - if len(src) == 1 { - return appendArg(dst, src[0]) - } - - dst = append(dst, src...) - return dst -} - -func appendArg(dst []interface{}, arg interface{}) []interface{} { - switch arg := arg.(type) { - case []string: - for _, s := range arg { - dst = append(dst, s) - } - return dst - case []interface{}: - dst = append(dst, arg...) 
- return dst - case map[string]interface{}: - for k, v := range arg { - dst = append(dst, k, v) - } - return dst - default: - return append(dst, arg) - } -} - -type Cmdable interface { - Pipeline() Pipeliner - Pipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error) - - TxPipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error) - TxPipeline() Pipeliner - - Command(ctx context.Context) *CommandsInfoCmd - ClientGetName(ctx context.Context) *StringCmd - Echo(ctx context.Context, message interface{}) *StringCmd - Ping(ctx context.Context) *StatusCmd - Quit(ctx context.Context) *StatusCmd - Del(ctx context.Context, keys ...string) *IntCmd - Unlink(ctx context.Context, keys ...string) *IntCmd - Dump(ctx context.Context, key string) *StringCmd - Exists(ctx context.Context, keys ...string) *IntCmd - Expire(ctx context.Context, key string, expiration time.Duration) *BoolCmd - ExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd - Keys(ctx context.Context, pattern string) *StringSliceCmd - Migrate(ctx context.Context, host, port, key string, db int, timeout time.Duration) *StatusCmd - Move(ctx context.Context, key string, db int) *BoolCmd - ObjectRefCount(ctx context.Context, key string) *IntCmd - ObjectEncoding(ctx context.Context, key string) *StringCmd - ObjectIdleTime(ctx context.Context, key string) *DurationCmd - Persist(ctx context.Context, key string) *BoolCmd - PExpire(ctx context.Context, key string, expiration time.Duration) *BoolCmd - PExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd - PTTL(ctx context.Context, key string) *DurationCmd - RandomKey(ctx context.Context) *StringCmd - Rename(ctx context.Context, key, newkey string) *StatusCmd - RenameNX(ctx context.Context, key, newkey string) *BoolCmd - Restore(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd - RestoreReplace(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd - Sort(ctx context.Context, key string, sort *Sort) *StringSliceCmd - SortStore(ctx context.Context, key, store string, sort *Sort) *IntCmd - SortInterfaces(ctx context.Context, key string, sort *Sort) *SliceCmd - Touch(ctx context.Context, keys ...string) *IntCmd - TTL(ctx context.Context, key string) *DurationCmd - Type(ctx context.Context, key string) *StatusCmd - Append(ctx context.Context, key, value string) *IntCmd - Decr(ctx context.Context, key string) *IntCmd - DecrBy(ctx context.Context, key string, decrement int64) *IntCmd - Get(ctx context.Context, key string) *StringCmd - GetRange(ctx context.Context, key string, start, end int64) *StringCmd - GetSet(ctx context.Context, key string, value interface{}) *StringCmd - Incr(ctx context.Context, key string) *IntCmd - IncrBy(ctx context.Context, key string, value int64) *IntCmd - IncrByFloat(ctx context.Context, key string, value float64) *FloatCmd - MGet(ctx context.Context, keys ...string) *SliceCmd - MSet(ctx context.Context, values ...interface{}) *StatusCmd - MSetNX(ctx context.Context, values ...interface{}) *BoolCmd - Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd - SetNX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd - SetXX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd - SetRange(ctx context.Context, key string, offset int64, value string) *IntCmd - StrLen(ctx context.Context, key string) *IntCmd - - GetBit(ctx context.Context, key string, offset int64) *IntCmd - 
SetBit(ctx context.Context, key string, offset int64, value int) *IntCmd - BitCount(ctx context.Context, key string, bitCount *BitCount) *IntCmd - BitOpAnd(ctx context.Context, destKey string, keys ...string) *IntCmd - BitOpOr(ctx context.Context, destKey string, keys ...string) *IntCmd - BitOpXor(ctx context.Context, destKey string, keys ...string) *IntCmd - BitOpNot(ctx context.Context, destKey string, key string) *IntCmd - BitPos(ctx context.Context, key string, bit int64, pos ...int64) *IntCmd - BitField(ctx context.Context, key string, args ...interface{}) *IntSliceCmd - - Scan(ctx context.Context, cursor uint64, match string, count int64) *ScanCmd - SScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd - HScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd - ZScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd - - HDel(ctx context.Context, key string, fields ...string) *IntCmd - HExists(ctx context.Context, key, field string) *BoolCmd - HGet(ctx context.Context, key, field string) *StringCmd - HGetAll(ctx context.Context, key string) *StringStringMapCmd - HIncrBy(ctx context.Context, key, field string, incr int64) *IntCmd - HIncrByFloat(ctx context.Context, key, field string, incr float64) *FloatCmd - HKeys(ctx context.Context, key string) *StringSliceCmd - HLen(ctx context.Context, key string) *IntCmd - HMGet(ctx context.Context, key string, fields ...string) *SliceCmd - HSet(ctx context.Context, key string, values ...interface{}) *IntCmd - HMSet(ctx context.Context, key string, values ...interface{}) *BoolCmd - HSetNX(ctx context.Context, key, field string, value interface{}) *BoolCmd - HVals(ctx context.Context, key string) *StringSliceCmd - - BLPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd - BRPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd - BRPopLPush(ctx context.Context, source, destination string, timeout time.Duration) *StringCmd - LIndex(ctx context.Context, key string, index int64) *StringCmd - LInsert(ctx context.Context, key, op string, pivot, value interface{}) *IntCmd - LInsertBefore(ctx context.Context, key string, pivot, value interface{}) *IntCmd - LInsertAfter(ctx context.Context, key string, pivot, value interface{}) *IntCmd - LLen(ctx context.Context, key string) *IntCmd - LPop(ctx context.Context, key string) *StringCmd - LPush(ctx context.Context, key string, values ...interface{}) *IntCmd - LPushX(ctx context.Context, key string, values ...interface{}) *IntCmd - LRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd - LRem(ctx context.Context, key string, count int64, value interface{}) *IntCmd - LSet(ctx context.Context, key string, index int64, value interface{}) *StatusCmd - LTrim(ctx context.Context, key string, start, stop int64) *StatusCmd - RPop(ctx context.Context, key string) *StringCmd - RPopLPush(ctx context.Context, source, destination string) *StringCmd - RPush(ctx context.Context, key string, values ...interface{}) *IntCmd - RPushX(ctx context.Context, key string, values ...interface{}) *IntCmd - - SAdd(ctx context.Context, key string, members ...interface{}) *IntCmd - SCard(ctx context.Context, key string) *IntCmd - SDiff(ctx context.Context, keys ...string) *StringSliceCmd - SDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd - SInter(ctx context.Context, keys ...string) *StringSliceCmd - SInterStore(ctx context.Context, 
destination string, keys ...string) *IntCmd - SIsMember(ctx context.Context, key string, member interface{}) *BoolCmd - SMembers(ctx context.Context, key string) *StringSliceCmd - SMembersMap(ctx context.Context, key string) *StringStructMapCmd - SMove(ctx context.Context, source, destination string, member interface{}) *BoolCmd - SPop(ctx context.Context, key string) *StringCmd - SPopN(ctx context.Context, key string, count int64) *StringSliceCmd - SRandMember(ctx context.Context, key string) *StringCmd - SRandMemberN(ctx context.Context, key string, count int64) *StringSliceCmd - SRem(ctx context.Context, key string, members ...interface{}) *IntCmd - SUnion(ctx context.Context, keys ...string) *StringSliceCmd - SUnionStore(ctx context.Context, destination string, keys ...string) *IntCmd - - XAdd(ctx context.Context, a *XAddArgs) *StringCmd - XDel(ctx context.Context, stream string, ids ...string) *IntCmd - XLen(ctx context.Context, stream string) *IntCmd - XRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd - XRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd - XRevRange(ctx context.Context, stream string, start, stop string) *XMessageSliceCmd - XRevRangeN(ctx context.Context, stream string, start, stop string, count int64) *XMessageSliceCmd - XRead(ctx context.Context, a *XReadArgs) *XStreamSliceCmd - XReadStreams(ctx context.Context, streams ...string) *XStreamSliceCmd - XGroupCreate(ctx context.Context, stream, group, start string) *StatusCmd - XGroupCreateMkStream(ctx context.Context, stream, group, start string) *StatusCmd - XGroupSetID(ctx context.Context, stream, group, start string) *StatusCmd - XGroupDestroy(ctx context.Context, stream, group string) *IntCmd - XGroupDelConsumer(ctx context.Context, stream, group, consumer string) *IntCmd - XReadGroup(ctx context.Context, a *XReadGroupArgs) *XStreamSliceCmd - XAck(ctx context.Context, stream, group string, ids ...string) *IntCmd - XPending(ctx context.Context, stream, group string) *XPendingCmd - XPendingExt(ctx context.Context, a *XPendingExtArgs) *XPendingExtCmd - XClaim(ctx context.Context, a *XClaimArgs) *XMessageSliceCmd - XClaimJustID(ctx context.Context, a *XClaimArgs) *StringSliceCmd - XTrim(ctx context.Context, key string, maxLen int64) *IntCmd - XTrimApprox(ctx context.Context, key string, maxLen int64) *IntCmd - XInfoGroups(ctx context.Context, key string) *XInfoGroupsCmd - XInfoStream(ctx context.Context, key string) *XInfoStreamCmd - - BZPopMax(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd - BZPopMin(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd - ZAdd(ctx context.Context, key string, members ...*Z) *IntCmd - ZAddNX(ctx context.Context, key string, members ...*Z) *IntCmd - ZAddXX(ctx context.Context, key string, members ...*Z) *IntCmd - ZAddCh(ctx context.Context, key string, members ...*Z) *IntCmd - ZAddNXCh(ctx context.Context, key string, members ...*Z) *IntCmd - ZAddXXCh(ctx context.Context, key string, members ...*Z) *IntCmd - ZIncr(ctx context.Context, key string, member *Z) *FloatCmd - ZIncrNX(ctx context.Context, key string, member *Z) *FloatCmd - ZIncrXX(ctx context.Context, key string, member *Z) *FloatCmd - ZCard(ctx context.Context, key string) *IntCmd - ZCount(ctx context.Context, key, min, max string) *IntCmd - ZLexCount(ctx context.Context, key, min, max string) *IntCmd - ZIncrBy(ctx context.Context, key string, increment float64, member string) *FloatCmd - ZInterStore(ctx context.Context, 
destination string, store *ZStore) *IntCmd - ZPopMax(ctx context.Context, key string, count ...int64) *ZSliceCmd - ZPopMin(ctx context.Context, key string, count ...int64) *ZSliceCmd - ZRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd - ZRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd - ZRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd - ZRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd - ZRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd - ZRank(ctx context.Context, key, member string) *IntCmd - ZRem(ctx context.Context, key string, members ...interface{}) *IntCmd - ZRemRangeByRank(ctx context.Context, key string, start, stop int64) *IntCmd - ZRemRangeByScore(ctx context.Context, key, min, max string) *IntCmd - ZRemRangeByLex(ctx context.Context, key, min, max string) *IntCmd - ZRevRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd - ZRevRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd - ZRevRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd - ZRevRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd - ZRevRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd - ZRevRank(ctx context.Context, key, member string) *IntCmd - ZScore(ctx context.Context, key, member string) *FloatCmd - ZUnionStore(ctx context.Context, dest string, store *ZStore) *IntCmd - - PFAdd(ctx context.Context, key string, els ...interface{}) *IntCmd - PFCount(ctx context.Context, keys ...string) *IntCmd - PFMerge(ctx context.Context, dest string, keys ...string) *StatusCmd - - BgRewriteAOF(ctx context.Context) *StatusCmd - BgSave(ctx context.Context) *StatusCmd - ClientKill(ctx context.Context, ipPort string) *StatusCmd - ClientKillByFilter(ctx context.Context, keys ...string) *IntCmd - ClientList(ctx context.Context) *StringCmd - ClientPause(ctx context.Context, dur time.Duration) *BoolCmd - ClientID(ctx context.Context) *IntCmd - ConfigGet(ctx context.Context, parameter string) *SliceCmd - ConfigResetStat(ctx context.Context) *StatusCmd - ConfigSet(ctx context.Context, parameter, value string) *StatusCmd - ConfigRewrite(ctx context.Context) *StatusCmd - DBSize(ctx context.Context) *IntCmd - FlushAll(ctx context.Context) *StatusCmd - FlushAllAsync(ctx context.Context) *StatusCmd - FlushDB(ctx context.Context) *StatusCmd - FlushDBAsync(ctx context.Context) *StatusCmd - Info(ctx context.Context, section ...string) *StringCmd - LastSave(ctx context.Context) *IntCmd - Save(ctx context.Context) *StatusCmd - Shutdown(ctx context.Context) *StatusCmd - ShutdownSave(ctx context.Context) *StatusCmd - ShutdownNoSave(ctx context.Context) *StatusCmd - SlaveOf(ctx context.Context, host, port string) *StatusCmd - Time(ctx context.Context) *TimeCmd - DebugObject(ctx context.Context, key string) *StringCmd - ReadOnly(ctx context.Context) *StatusCmd - ReadWrite(ctx context.Context) *StatusCmd - MemoryUsage(ctx context.Context, key string, samples ...int) *IntCmd - - Eval(ctx context.Context, script string, keys []string, args ...interface{}) *Cmd - EvalSha(ctx context.Context, sha1 string, keys []string, args ...interface{}) *Cmd - ScriptExists(ctx context.Context, hashes ...string) *BoolSliceCmd - ScriptFlush(ctx context.Context) *StatusCmd - ScriptKill(ctx context.Context) *StatusCmd - ScriptLoad(ctx context.Context, script string) *StringCmd - - Publish(ctx 
context.Context, channel string, message interface{}) *IntCmd - PubSubChannels(ctx context.Context, pattern string) *StringSliceCmd - PubSubNumSub(ctx context.Context, channels ...string) *StringIntMapCmd - PubSubNumPat(ctx context.Context) *IntCmd - - ClusterSlots(ctx context.Context) *ClusterSlotsCmd - ClusterNodes(ctx context.Context) *StringCmd - ClusterMeet(ctx context.Context, host, port string) *StatusCmd - ClusterForget(ctx context.Context, nodeID string) *StatusCmd - ClusterReplicate(ctx context.Context, nodeID string) *StatusCmd - ClusterResetSoft(ctx context.Context) *StatusCmd - ClusterResetHard(ctx context.Context) *StatusCmd - ClusterInfo(ctx context.Context) *StringCmd - ClusterKeySlot(ctx context.Context, key string) *IntCmd - ClusterGetKeysInSlot(ctx context.Context, slot int, count int) *StringSliceCmd - ClusterCountFailureReports(ctx context.Context, nodeID string) *IntCmd - ClusterCountKeysInSlot(ctx context.Context, slot int) *IntCmd - ClusterDelSlots(ctx context.Context, slots ...int) *StatusCmd - ClusterDelSlotsRange(ctx context.Context, min, max int) *StatusCmd - ClusterSaveConfig(ctx context.Context) *StatusCmd - ClusterSlaves(ctx context.Context, nodeID string) *StringSliceCmd - ClusterFailover(ctx context.Context) *StatusCmd - ClusterAddSlots(ctx context.Context, slots ...int) *StatusCmd - ClusterAddSlotsRange(ctx context.Context, min, max int) *StatusCmd - - GeoAdd(ctx context.Context, key string, geoLocation ...*GeoLocation) *IntCmd - GeoPos(ctx context.Context, key string, members ...string) *GeoPosCmd - GeoRadius(ctx context.Context, key string, longitude, latitude float64, query *GeoRadiusQuery) *GeoLocationCmd - GeoRadiusStore(ctx context.Context, key string, longitude, latitude float64, query *GeoRadiusQuery) *IntCmd - GeoRadiusByMember(ctx context.Context, key, member string, query *GeoRadiusQuery) *GeoLocationCmd - GeoRadiusByMemberStore(ctx context.Context, key, member string, query *GeoRadiusQuery) *IntCmd - GeoDist(ctx context.Context, key string, member1, member2, unit string) *FloatCmd - GeoHash(ctx context.Context, key string, members ...string) *StringSliceCmd -} - -type StatefulCmdable interface { - Cmdable - Auth(ctx context.Context, password string) *StatusCmd - AuthACL(ctx context.Context, username, password string) *StatusCmd - Select(ctx context.Context, index int) *StatusCmd - SwapDB(ctx context.Context, index1, index2 int) *StatusCmd - ClientSetName(ctx context.Context, name string) *BoolCmd -} - -var ( - _ Cmdable = (*Client)(nil) - _ Cmdable = (*Tx)(nil) - _ Cmdable = (*Ring)(nil) - _ Cmdable = (*ClusterClient)(nil) -) - -type cmdable func(ctx context.Context, cmd Cmder) error - -type statefulCmdable func(ctx context.Context, cmd Cmder) error - -//------------------------------------------------------------------------------ - -func (c statefulCmdable) Auth(ctx context.Context, password string) *StatusCmd { - cmd := NewStatusCmd(ctx, "auth", password) - _ = c(ctx, cmd) - return cmd -} - -// Perform an AUTH command, using the given user and pass. -// Should be used to authenticate the current connection with one of the connections defined in the ACL list -// when connecting to a Redis 6.0 instance, or greater, that is using the Redis ACL system. 
-func (c statefulCmdable) AuthACL(ctx context.Context, username, password string) *StatusCmd { - cmd := NewStatusCmd(ctx, "auth", username, password) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Wait(ctx context.Context, numSlaves int, timeout time.Duration) *IntCmd { - cmd := NewIntCmd(ctx, "wait", numSlaves, int(timeout/time.Millisecond)) - _ = c(ctx, cmd) - return cmd -} - -func (c statefulCmdable) Select(ctx context.Context, index int) *StatusCmd { - cmd := NewStatusCmd(ctx, "select", index) - _ = c(ctx, cmd) - return cmd -} - -func (c statefulCmdable) SwapDB(ctx context.Context, index1, index2 int) *StatusCmd { - cmd := NewStatusCmd(ctx, "swapdb", index1, index2) - _ = c(ctx, cmd) - return cmd -} - -// ClientSetName assigns a name to the connection. -func (c statefulCmdable) ClientSetName(ctx context.Context, name string) *BoolCmd { - cmd := NewBoolCmd(ctx, "client", "setname", name) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) Command(ctx context.Context) *CommandsInfoCmd { - cmd := NewCommandsInfoCmd(ctx, "command") - _ = c(ctx, cmd) - return cmd -} - -// ClientGetName returns the name of the connection. -func (c cmdable) ClientGetName(ctx context.Context) *StringCmd { - cmd := NewStringCmd(ctx, "client", "getname") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Echo(ctx context.Context, message interface{}) *StringCmd { - cmd := NewStringCmd(ctx, "echo", message) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Ping(ctx context.Context) *StatusCmd { - cmd := NewStatusCmd(ctx, "ping") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Quit(ctx context.Context) *StatusCmd { - panic("not implemented") -} - -func (c cmdable) Del(ctx context.Context, keys ...string) *IntCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "del" - for i, key := range keys { - args[1+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Unlink(ctx context.Context, keys ...string) *IntCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "unlink" - for i, key := range keys { - args[1+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Dump(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "dump", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Exists(ctx context.Context, keys ...string) *IntCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "exists" - for i, key := range keys { - args[1+i] = key - } - cmd := NewIntCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Expire(ctx context.Context, key string, expiration time.Duration) *BoolCmd { - cmd := NewBoolCmd(ctx, "expire", key, formatSec(ctx, expiration)) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd { - cmd := NewBoolCmd(ctx, "expireat", key, tm.Unix()) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Keys(ctx context.Context, pattern string) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "keys", pattern) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Migrate(ctx context.Context, host, port, key string, db int, timeout time.Duration) *StatusCmd { - cmd := NewStatusCmd( - ctx, - "migrate", - host, - port, - key, - db, - formatMs(ctx, timeout), - ) - cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Move(ctx context.Context, key string, db int) *BoolCmd { - cmd := NewBoolCmd(ctx, "move", key, db) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ObjectRefCount(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "object", "refcount", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ObjectEncoding(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "object", "encoding", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ObjectIdleTime(ctx context.Context, key string) *DurationCmd { - cmd := NewDurationCmd(ctx, time.Second, "object", "idletime", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Persist(ctx context.Context, key string) *BoolCmd { - cmd := NewBoolCmd(ctx, "persist", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) PExpire(ctx context.Context, key string, expiration time.Duration) *BoolCmd { - cmd := NewBoolCmd(ctx, "pexpire", key, formatMs(ctx, expiration)) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) PExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd { - cmd := NewBoolCmd( - ctx, - "pexpireat", - key, - tm.UnixNano()/int64(time.Millisecond), - ) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) PTTL(ctx context.Context, key string) *DurationCmd { - cmd := NewDurationCmd(ctx, time.Millisecond, "pttl", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RandomKey(ctx context.Context) *StringCmd { - cmd := NewStringCmd(ctx, "randomkey") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Rename(ctx context.Context, key, newkey string) *StatusCmd { - cmd := NewStatusCmd(ctx, "rename", key, newkey) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RenameNX(ctx context.Context, key, newkey string) *BoolCmd { - cmd := NewBoolCmd(ctx, "renamenx", key, newkey) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Restore(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd { - cmd := NewStatusCmd( - ctx, - "restore", - key, - formatMs(ctx, ttl), - value, - ) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RestoreReplace(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd { - cmd := NewStatusCmd( - ctx, - "restore", - key, - formatMs(ctx, ttl), - value, - "replace", - ) - _ = c(ctx, cmd) - return cmd -} - -type Sort struct { - By string - Offset, Count int64 - Get []string - Order string - Alpha bool -} - -func (sort *Sort) args(key string) []interface{} { - args := []interface{}{"sort", key} - if sort.By != "" { - args = append(args, "by", sort.By) - } - if sort.Offset != 0 || sort.Count != 0 { - args = append(args, "limit", sort.Offset, sort.Count) - } - 
for _, get := range sort.Get { - args = append(args, "get", get) - } - if sort.Order != "" { - args = append(args, sort.Order) - } - if sort.Alpha { - args = append(args, "alpha") - } - return args -} - -func (c cmdable) Sort(ctx context.Context, key string, sort *Sort) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, sort.args(key)...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SortStore(ctx context.Context, key, store string, sort *Sort) *IntCmd { - args := sort.args(key) - if store != "" { - args = append(args, "store", store) - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SortInterfaces(ctx context.Context, key string, sort *Sort) *SliceCmd { - cmd := NewSliceCmd(ctx, sort.args(key)...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Touch(ctx context.Context, keys ...string) *IntCmd { - args := make([]interface{}, len(keys)+1) - args[0] = "touch" - for i, key := range keys { - args[i+1] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) TTL(ctx context.Context, key string) *DurationCmd { - cmd := NewDurationCmd(ctx, time.Second, "ttl", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Type(ctx context.Context, key string) *StatusCmd { - cmd := NewStatusCmd(ctx, "type", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Append(ctx context.Context, key, value string) *IntCmd { - cmd := NewIntCmd(ctx, "append", key, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Decr(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "decr", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) DecrBy(ctx context.Context, key string, decrement int64) *IntCmd { - cmd := NewIntCmd(ctx, "decrby", key, decrement) - _ = c(ctx, cmd) - return cmd -} - -// Redis `GET key` command. It returns redis.Nil error when key does not exist. -func (c cmdable) Get(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "get", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) GetRange(ctx context.Context, key string, start, end int64) *StringCmd { - cmd := NewStringCmd(ctx, "getrange", key, start, end) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) GetSet(ctx context.Context, key string, value interface{}) *StringCmd { - cmd := NewStringCmd(ctx, "getset", key, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) Incr(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "incr", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) IncrBy(ctx context.Context, key string, value int64) *IntCmd { - cmd := NewIntCmd(ctx, "incrby", key, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) IncrByFloat(ctx context.Context, key string, value float64) *FloatCmd { - cmd := NewFloatCmd(ctx, "incrbyfloat", key, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) MGet(ctx context.Context, keys ...string) *SliceCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "mget" - for i, key := range keys { - args[1+i] = key - } - cmd := NewSliceCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -// MSet is like Set but accepts multiple values: -// - MSet("key1", "value1", "key2", "value2") -// - MSet([]string{"key1", "value1", "key2", "value2"}) -// - MSet(map[string]interface{}{"key1": "value1", "key2": "value2"}) -func (c cmdable) MSet(ctx context.Context, values ...interface{}) *StatusCmd { - args := make([]interface{}, 1, 1+len(values)) - args[0] = "mset" - args = appendArgs(args, values) - cmd := NewStatusCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -// MSetNX is like SetNX but accepts multiple values: -// - MSetNX("key1", "value1", "key2", "value2") -// - MSetNX([]string{"key1", "value1", "key2", "value2"}) -// - MSetNX(map[string]interface{}{"key1": "value1", "key2": "value2"}) -func (c cmdable) MSetNX(ctx context.Context, values ...interface{}) *BoolCmd { - args := make([]interface{}, 1, 1+len(values)) - args[0] = "msetnx" - args = appendArgs(args, values) - cmd := NewBoolCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SET key value [expiration]` command. -// Use expiration for `SETEX`-like behavior. -// -// Zero expiration means the key has no expiration time. -// KeepTTL(-1) expiration is a Redis KEEPTTL option to keep existing TTL. -func (c cmdable) Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd { - args := make([]interface{}, 3, 5) - args[0] = "set" - args[1] = key - args[2] = value - if expiration > 0 { - if usePrecise(expiration) { - args = append(args, "px", formatMs(ctx, expiration)) - } else { - args = append(args, "ex", formatSec(ctx, expiration)) - } - } else if expiration == KeepTTL { - args = append(args, "keepttl") - } - - cmd := NewStatusCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SET key value [expiration] NX` command. -// -// Zero expiration means the key has no expiration time. -// KeepTTL(-1) expiration is a Redis KEEPTTL option to keep existing TTL. -func (c cmdable) SetNX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd { - var cmd *BoolCmd - switch expiration { - case 0: - // Use old `SETNX` to support old Redis versions. - cmd = NewBoolCmd(ctx, "setnx", key, value) - case KeepTTL: - cmd = NewBoolCmd(ctx, "set", key, value, "keepttl", "nx") - default: - if usePrecise(expiration) { - cmd = NewBoolCmd(ctx, "set", key, value, "px", formatMs(ctx, expiration), "nx") - } else { - cmd = NewBoolCmd(ctx, "set", key, value, "ex", formatSec(ctx, expiration), "nx") - } - } - - _ = c(ctx, cmd) - return cmd -} - -// Redis `SET key value [expiration] XX` command. -// -// Zero expiration means the key has no expiration time. -// KeepTTL(-1) expiration is a Redis KEEPTTL option to keep existing TTL. 
-func (c cmdable) SetXX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd { - var cmd *BoolCmd - switch expiration { - case 0: - cmd = NewBoolCmd(ctx, "set", key, value, "xx") - case KeepTTL: - cmd = NewBoolCmd(ctx, "set", key, value, "keepttl", "xx") - default: - if usePrecise(expiration) { - cmd = NewBoolCmd(ctx, "set", key, value, "px", formatMs(ctx, expiration), "xx") - } else { - cmd = NewBoolCmd(ctx, "set", key, value, "ex", formatSec(ctx, expiration), "xx") - } - } - - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SetRange(ctx context.Context, key string, offset int64, value string) *IntCmd { - cmd := NewIntCmd(ctx, "setrange", key, offset, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) StrLen(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "strlen", key) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) GetBit(ctx context.Context, key string, offset int64) *IntCmd { - cmd := NewIntCmd(ctx, "getbit", key, offset) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SetBit(ctx context.Context, key string, offset int64, value int) *IntCmd { - cmd := NewIntCmd( - ctx, - "setbit", - key, - offset, - value, - ) - _ = c(ctx, cmd) - return cmd -} - -type BitCount struct { - Start, End int64 -} - -func (c cmdable) BitCount(ctx context.Context, key string, bitCount *BitCount) *IntCmd { - args := []interface{}{"bitcount", key} - if bitCount != nil { - args = append( - args, - bitCount.Start, - bitCount.End, - ) - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) bitOp(ctx context.Context, op, destKey string, keys ...string) *IntCmd { - args := make([]interface{}, 3+len(keys)) - args[0] = "bitop" - args[1] = op - args[2] = destKey - for i, key := range keys { - args[3+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) BitOpAnd(ctx context.Context, destKey string, keys ...string) *IntCmd { - return c.bitOp(ctx, "and", destKey, keys...) -} - -func (c cmdable) BitOpOr(ctx context.Context, destKey string, keys ...string) *IntCmd { - return c.bitOp(ctx, "or", destKey, keys...) -} - -func (c cmdable) BitOpXor(ctx context.Context, destKey string, keys ...string) *IntCmd { - return c.bitOp(ctx, "xor", destKey, keys...) -} - -func (c cmdable) BitOpNot(ctx context.Context, destKey string, key string) *IntCmd { - return c.bitOp(ctx, "not", destKey, key) -} - -func (c cmdable) BitPos(ctx context.Context, key string, bit int64, pos ...int64) *IntCmd { - args := make([]interface{}, 3+len(pos)) - args[0] = "bitpos" - args[1] = key - args[2] = bit - switch len(pos) { - case 0: - case 1: - args[3] = pos[0] - case 2: - args[3] = pos[0] - args[4] = pos[1] - default: - panic("too many arguments") - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) BitField(ctx context.Context, key string, args ...interface{}) *IntSliceCmd { - a := make([]interface{}, 0, 2+len(args)) - a = append(a, "bitfield") - a = append(a, key) - a = append(a, args...) - cmd := NewIntSliceCmd(ctx, a...) 
- _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) Scan(ctx context.Context, cursor uint64, match string, count int64) *ScanCmd { - args := []interface{}{"scan", cursor} - if match != "" { - args = append(args, "match", match) - } - if count > 0 { - args = append(args, "count", count) - } - cmd := NewScanCmd(ctx, c, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd { - args := []interface{}{"sscan", key, cursor} - if match != "" { - args = append(args, "match", match) - } - if count > 0 { - args = append(args, "count", count) - } - cmd := NewScanCmd(ctx, c, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd { - args := []interface{}{"hscan", key, cursor} - if match != "" { - args = append(args, "match", match) - } - if count > 0 { - args = append(args, "count", count) - } - cmd := NewScanCmd(ctx, c, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd { - args := []interface{}{"zscan", key, cursor} - if match != "" { - args = append(args, "match", match) - } - if count > 0 { - args = append(args, "count", count) - } - cmd := NewScanCmd(ctx, c, args...) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) HDel(ctx context.Context, key string, fields ...string) *IntCmd { - args := make([]interface{}, 2+len(fields)) - args[0] = "hdel" - args[1] = key - for i, field := range fields { - args[2+i] = field - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HExists(ctx context.Context, key, field string) *BoolCmd { - cmd := NewBoolCmd(ctx, "hexists", key, field) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HGet(ctx context.Context, key, field string) *StringCmd { - cmd := NewStringCmd(ctx, "hget", key, field) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HGetAll(ctx context.Context, key string) *StringStringMapCmd { - cmd := NewStringStringMapCmd(ctx, "hgetall", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HIncrBy(ctx context.Context, key, field string, incr int64) *IntCmd { - cmd := NewIntCmd(ctx, "hincrby", key, field, incr) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HIncrByFloat(ctx context.Context, key, field string, incr float64) *FloatCmd { - cmd := NewFloatCmd(ctx, "hincrbyfloat", key, field, incr) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HKeys(ctx context.Context, key string) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "hkeys", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HLen(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "hlen", key) - _ = c(ctx, cmd) - return cmd -} - -// HMGet returns the values for the specified fields in the hash stored at key. -// It returns an interface{} to distinguish between empty string and nil value. -func (c cmdable) HMGet(ctx context.Context, key string, fields ...string) *SliceCmd { - args := make([]interface{}, 2+len(fields)) - args[0] = "hmget" - args[1] = key - for i, field := range fields { - args[2+i] = field - } - cmd := NewSliceCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -// HSet accepts values in following formats: -// - HSet("myhash", "key1", "value1", "key2", "value2") -// - HSet("myhash", []string{"key1", "value1", "key2", "value2"}) -// - HSet("myhash", map[string]interface{}{"key1": "value1", "key2": "value2"}) -// -// Note that it requires Redis v4 for multiple field/value pairs support. -func (c cmdable) HSet(ctx context.Context, key string, values ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "hset" - args[1] = key - args = appendArgs(args, values) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -// HMSet is a deprecated version of HSet left for compatibility with Redis 3. -func (c cmdable) HMSet(ctx context.Context, key string, values ...interface{}) *BoolCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "hmset" - args[1] = key - args = appendArgs(args, values) - cmd := NewBoolCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HSetNX(ctx context.Context, key, field string, value interface{}) *BoolCmd { - cmd := NewBoolCmd(ctx, "hsetnx", key, field, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) HVals(ctx context.Context, key string) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "hvals", key) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) BLPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd { - args := make([]interface{}, 1+len(keys)+1) - args[0] = "blpop" - for i, key := range keys { - args[1+i] = key - } - args[len(args)-1] = formatSec(ctx, timeout) - cmd := NewStringSliceCmd(ctx, args...) - cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) BRPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd { - args := make([]interface{}, 1+len(keys)+1) - args[0] = "brpop" - for i, key := range keys { - args[1+i] = key - } - args[len(keys)+1] = formatSec(ctx, timeout) - cmd := NewStringSliceCmd(ctx, args...) 
- cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) BRPopLPush(ctx context.Context, source, destination string, timeout time.Duration) *StringCmd { - cmd := NewStringCmd( - ctx, - "brpoplpush", - source, - destination, - formatSec(ctx, timeout), - ) - cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LIndex(ctx context.Context, key string, index int64) *StringCmd { - cmd := NewStringCmd(ctx, "lindex", key, index) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LInsert(ctx context.Context, key, op string, pivot, value interface{}) *IntCmd { - cmd := NewIntCmd(ctx, "linsert", key, op, pivot, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LInsertBefore(ctx context.Context, key string, pivot, value interface{}) *IntCmd { - cmd := NewIntCmd(ctx, "linsert", key, "before", pivot, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LInsertAfter(ctx context.Context, key string, pivot, value interface{}) *IntCmd { - cmd := NewIntCmd(ctx, "linsert", key, "after", pivot, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LLen(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "llen", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LPop(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "lpop", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LPush(ctx context.Context, key string, values ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "lpush" - args[1] = key - args = appendArgs(args, values) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LPushX(ctx context.Context, key string, values ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "lpushx" - args[1] = key - args = appendArgs(args, values) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd { - cmd := NewStringSliceCmd( - ctx, - "lrange", - key, - start, - stop, - ) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LRem(ctx context.Context, key string, count int64, value interface{}) *IntCmd { - cmd := NewIntCmd(ctx, "lrem", key, count, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LSet(ctx context.Context, key string, index int64, value interface{}) *StatusCmd { - cmd := NewStatusCmd(ctx, "lset", key, index, value) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) LTrim(ctx context.Context, key string, start, stop int64) *StatusCmd { - cmd := NewStatusCmd( - ctx, - "ltrim", - key, - start, - stop, - ) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RPop(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "rpop", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RPopLPush(ctx context.Context, source, destination string) *StringCmd { - cmd := NewStringCmd(ctx, "rpoplpush", source, destination) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RPush(ctx context.Context, key string, values ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "rpush" - args[1] = key - args = appendArgs(args, values) - cmd := NewIntCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) RPushX(ctx context.Context, key string, values ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(values)) - args[0] = "rpushx" - args[1] = key - args = appendArgs(args, values) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) SAdd(ctx context.Context, key string, members ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(members)) - args[0] = "sadd" - args[1] = key - args = appendArgs(args, members) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SCard(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "scard", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SDiff(ctx context.Context, keys ...string) *StringSliceCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "sdiff" - for i, key := range keys { - args[1+i] = key - } - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd { - args := make([]interface{}, 2+len(keys)) - args[0] = "sdiffstore" - args[1] = destination - for i, key := range keys { - args[2+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SInter(ctx context.Context, keys ...string) *StringSliceCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "sinter" - for i, key := range keys { - args[1+i] = key - } - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SInterStore(ctx context.Context, destination string, keys ...string) *IntCmd { - args := make([]interface{}, 2+len(keys)) - args[0] = "sinterstore" - args[1] = destination - for i, key := range keys { - args[2+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SIsMember(ctx context.Context, key string, member interface{}) *BoolCmd { - cmd := NewBoolCmd(ctx, "sismember", key, member) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SMEMBERS key` command output as a slice. -func (c cmdable) SMembers(ctx context.Context, key string) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "smembers", key) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SMEMBERS key` command output as a map. -func (c cmdable) SMembersMap(ctx context.Context, key string) *StringStructMapCmd { - cmd := NewStringStructMapCmd(ctx, "smembers", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SMove(ctx context.Context, source, destination string, member interface{}) *BoolCmd { - cmd := NewBoolCmd(ctx, "smove", source, destination, member) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SPOP key` command. -func (c cmdable) SPop(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "spop", key) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SPOP key count` command. -func (c cmdable) SPopN(ctx context.Context, key string, count int64) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "spop", key, count) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SRANDMEMBER key` command. -func (c cmdable) SRandMember(ctx context.Context, key string) *StringCmd { - cmd := NewStringCmd(ctx, "srandmember", key) - _ = c(ctx, cmd) - return cmd -} - -// Redis `SRANDMEMBER key count` command. 
-func (c cmdable) SRandMemberN(ctx context.Context, key string, count int64) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "srandmember", key, count) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SRem(ctx context.Context, key string, members ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(members)) - args[0] = "srem" - args[1] = key - args = appendArgs(args, members) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SUnion(ctx context.Context, keys ...string) *StringSliceCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "sunion" - for i, key := range keys { - args[1+i] = key - } - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) SUnionStore(ctx context.Context, destination string, keys ...string) *IntCmd { - args := make([]interface{}, 2+len(keys)) - args[0] = "sunionstore" - args[1] = destination - for i, key := range keys { - args[2+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -// XAddArgs accepts values in the following formats: -// - XAddArgs.Values = []interface{}{"key1", "value1", "key2", "value2"} -// - XAddArgs.Values = []string("key1", "value1", "key2", "value2") -// - XAddArgs.Values = map[string]interface{}{"key1": "value1", "key2": "value2"} -// -// Note that map will not preserve the order of key-value pairs. -type XAddArgs struct { - Stream string - MaxLen int64 // MAXLEN N - MaxLenApprox int64 // MAXLEN ~ N - ID string - Values interface{} -} - -func (c cmdable) XAdd(ctx context.Context, a *XAddArgs) *StringCmd { - args := make([]interface{}, 0, 8) - args = append(args, "xadd") - args = append(args, a.Stream) - if a.MaxLen > 0 { - args = append(args, "maxlen", a.MaxLen) - } else if a.MaxLenApprox > 0 { - args = append(args, "maxlen", "~", a.MaxLenApprox) - } - if a.ID != "" { - args = append(args, a.ID) - } else { - args = append(args, "*") - } - args = appendArg(args, a.Values) - - cmd := NewStringCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XDel(ctx context.Context, stream string, ids ...string) *IntCmd { - args := []interface{}{"xdel", stream} - for _, id := range ids { - args = append(args, id) - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XLen(ctx context.Context, stream string) *IntCmd { - cmd := NewIntCmd(ctx, "xlen", stream) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd { - cmd := NewXMessageSliceCmd(ctx, "xrange", stream, start, stop) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd { - cmd := NewXMessageSliceCmd(ctx, "xrange", stream, start, stop, "count", count) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XRevRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd { - cmd := NewXMessageSliceCmd(ctx, "xrevrange", stream, start, stop) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XRevRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd { - cmd := NewXMessageSliceCmd(ctx, "xrevrange", stream, start, stop, "count", count) - _ = c(ctx, cmd) - return cmd -} - -type XReadArgs struct { - Streams []string // list of streams and ids, e.g. 
stream1 stream2 id1 id2 - Count int64 - Block time.Duration -} - -func (c cmdable) XRead(ctx context.Context, a *XReadArgs) *XStreamSliceCmd { - args := make([]interface{}, 0, 5+len(a.Streams)) - args = append(args, "xread") - - keyPos := int8(1) - if a.Count > 0 { - args = append(args, "count") - args = append(args, a.Count) - keyPos += 2 - } - if a.Block >= 0 { - args = append(args, "block") - args = append(args, int64(a.Block/time.Millisecond)) - keyPos += 2 - } - args = append(args, "streams") - keyPos++ - for _, s := range a.Streams { - args = append(args, s) - } - - cmd := NewXStreamSliceCmd(ctx, args...) - if a.Block >= 0 { - cmd.setReadTimeout(a.Block) - } - cmd.setFirstKeyPos(keyPos) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XReadStreams(ctx context.Context, streams ...string) *XStreamSliceCmd { - return c.XRead(ctx, &XReadArgs{ - Streams: streams, - Block: -1, - }) -} - -func (c cmdable) XGroupCreate(ctx context.Context, stream, group, start string) *StatusCmd { - cmd := NewStatusCmd(ctx, "xgroup", "create", stream, group, start) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XGroupCreateMkStream(ctx context.Context, stream, group, start string) *StatusCmd { - cmd := NewStatusCmd(ctx, "xgroup", "create", stream, group, start, "mkstream") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XGroupSetID(ctx context.Context, stream, group, start string) *StatusCmd { - cmd := NewStatusCmd(ctx, "xgroup", "setid", stream, group, start) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XGroupDestroy(ctx context.Context, stream, group string) *IntCmd { - cmd := NewIntCmd(ctx, "xgroup", "destroy", stream, group) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XGroupDelConsumer(ctx context.Context, stream, group, consumer string) *IntCmd { - cmd := NewIntCmd(ctx, "xgroup", "delconsumer", stream, group, consumer) - _ = c(ctx, cmd) - return cmd -} - -type XReadGroupArgs struct { - Group string - Consumer string - Streams []string // list of streams and ids, e.g. stream1 stream2 id1 id2 - Count int64 - Block time.Duration - NoAck bool -} - -func (c cmdable) XReadGroup(ctx context.Context, a *XReadGroupArgs) *XStreamSliceCmd { - args := make([]interface{}, 0, 8+len(a.Streams)) - args = append(args, "xreadgroup", "group", a.Group, a.Consumer) - - keyPos := int8(1) - if a.Count > 0 { - args = append(args, "count", a.Count) - keyPos += 2 - } - if a.Block >= 0 { - args = append(args, "block", int64(a.Block/time.Millisecond)) - keyPos += 2 - } - if a.NoAck { - args = append(args, "noack") - keyPos++ - } - args = append(args, "streams") - keyPos++ - for _, s := range a.Streams { - args = append(args, s) - } - - cmd := NewXStreamSliceCmd(ctx, args...) - if a.Block >= 0 { - cmd.setReadTimeout(a.Block) - } - cmd.setFirstKeyPos(keyPos) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XAck(ctx context.Context, stream, group string, ids ...string) *IntCmd { - args := []interface{}{"xack", stream, group} - for _, id := range ids { - args = append(args, id) - } - cmd := NewIntCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XPending(ctx context.Context, stream, group string) *XPendingCmd { - cmd := NewXPendingCmd(ctx, "xpending", stream, group) - _ = c(ctx, cmd) - return cmd -} - -type XPendingExtArgs struct { - Stream string - Group string - Start string - End string - Count int64 - Consumer string -} - -func (c cmdable) XPendingExt(ctx context.Context, a *XPendingExtArgs) *XPendingExtCmd { - args := make([]interface{}, 0, 7) - args = append(args, "xpending", a.Stream, a.Group, a.Start, a.End, a.Count) - if a.Consumer != "" { - args = append(args, a.Consumer) - } - cmd := NewXPendingExtCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -type XClaimArgs struct { - Stream string - Group string - Consumer string - MinIdle time.Duration - Messages []string -} - -func (c cmdable) XClaim(ctx context.Context, a *XClaimArgs) *XMessageSliceCmd { - args := xClaimArgs(a) - cmd := NewXMessageSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XClaimJustID(ctx context.Context, a *XClaimArgs) *StringSliceCmd { - args := xClaimArgs(a) - args = append(args, "justid") - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func xClaimArgs(a *XClaimArgs) []interface{} { - args := make([]interface{}, 0, 4+len(a.Messages)) - args = append(args, - "xclaim", - a.Stream, - a.Group, a.Consumer, - int64(a.MinIdle/time.Millisecond)) - for _, id := range a.Messages { - args = append(args, id) - } - return args -} - -func (c cmdable) XTrim(ctx context.Context, key string, maxLen int64) *IntCmd { - cmd := NewIntCmd(ctx, "xtrim", key, "maxlen", maxLen) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XTrimApprox(ctx context.Context, key string, maxLen int64) *IntCmd { - cmd := NewIntCmd(ctx, "xtrim", key, "maxlen", "~", maxLen) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XInfoGroups(ctx context.Context, key string) *XInfoGroupsCmd { - cmd := NewXInfoGroupsCmd(ctx, key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) XInfoStream(ctx context.Context, key string) *XInfoStreamCmd { - cmd := NewXInfoStreamCmd(ctx, key) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -// Z represents sorted set member. -type Z struct { - Score float64 - Member interface{} -} - -// ZWithKey represents sorted set member including the name of the key where it was popped. -type ZWithKey struct { - Z - Key string -} - -// ZStore is used as an arg to ZInterStore and ZUnionStore. -type ZStore struct { - Keys []string - Weights []float64 - // Can be SUM, MIN or MAX. - Aggregate string -} - -// Redis `BZPOPMAX key [key ...] timeout` command. -func (c cmdable) BZPopMax(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd { - args := make([]interface{}, 1+len(keys)+1) - args[0] = "bzpopmax" - for i, key := range keys { - args[1+i] = key - } - args[len(args)-1] = formatSec(ctx, timeout) - cmd := NewZWithKeyCmd(ctx, args...) - cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -// Redis `BZPOPMIN key [key ...] timeout` command. -func (c cmdable) BZPopMin(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd { - args := make([]interface{}, 1+len(keys)+1) - args[0] = "bzpopmin" - for i, key := range keys { - args[1+i] = key - } - args[len(args)-1] = formatSec(ctx, timeout) - cmd := NewZWithKeyCmd(ctx, args...) 
- cmd.setReadTimeout(timeout) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) zAdd(ctx context.Context, a []interface{}, n int, members ...*Z) *IntCmd { - for i, m := range members { - a[n+2*i] = m.Score - a[n+2*i+1] = m.Member - } - cmd := NewIntCmd(ctx, a...) - _ = c(ctx, cmd) - return cmd -} - -// Redis `ZADD key score member [score member ...]` command. -func (c cmdable) ZAdd(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 2 - a := make([]interface{}, n+2*len(members)) - a[0], a[1] = "zadd", key - return c.zAdd(ctx, a, n, members...) -} - -// Redis `ZADD key NX score member [score member ...]` command. -func (c cmdable) ZAddNX(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 3 - a := make([]interface{}, n+2*len(members)) - a[0], a[1], a[2] = "zadd", key, "nx" - return c.zAdd(ctx, a, n, members...) -} - -// Redis `ZADD key XX score member [score member ...]` command. -func (c cmdable) ZAddXX(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 3 - a := make([]interface{}, n+2*len(members)) - a[0], a[1], a[2] = "zadd", key, "xx" - return c.zAdd(ctx, a, n, members...) -} - -// Redis `ZADD key CH score member [score member ...]` command. -func (c cmdable) ZAddCh(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 3 - a := make([]interface{}, n+2*len(members)) - a[0], a[1], a[2] = "zadd", key, "ch" - return c.zAdd(ctx, a, n, members...) -} - -// Redis `ZADD key NX CH score member [score member ...]` command. -func (c cmdable) ZAddNXCh(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 4 - a := make([]interface{}, n+2*len(members)) - a[0], a[1], a[2], a[3] = "zadd", key, "nx", "ch" - return c.zAdd(ctx, a, n, members...) -} - -// Redis `ZADD key XX CH score member [score member ...]` command. -func (c cmdable) ZAddXXCh(ctx context.Context, key string, members ...*Z) *IntCmd { - const n = 4 - a := make([]interface{}, n+2*len(members)) - a[0], a[1], a[2], a[3] = "zadd", key, "xx", "ch" - return c.zAdd(ctx, a, n, members...) -} - -func (c cmdable) zIncr(ctx context.Context, a []interface{}, n int, members ...*Z) *FloatCmd { - for i, m := range members { - a[n+2*i] = m.Score - a[n+2*i+1] = m.Member - } - cmd := NewFloatCmd(ctx, a...) - _ = c(ctx, cmd) - return cmd -} - -// Redis `ZADD key INCR score member` command. -func (c cmdable) ZIncr(ctx context.Context, key string, member *Z) *FloatCmd { - const n = 3 - a := make([]interface{}, n+2) - a[0], a[1], a[2] = "zadd", key, "incr" - return c.zIncr(ctx, a, n, member) -} - -// Redis `ZADD key NX INCR score member` command. -func (c cmdable) ZIncrNX(ctx context.Context, key string, member *Z) *FloatCmd { - const n = 4 - a := make([]interface{}, n+2) - a[0], a[1], a[2], a[3] = "zadd", key, "incr", "nx" - return c.zIncr(ctx, a, n, member) -} - -// Redis `ZADD key XX INCR score member` command. 
-func (c cmdable) ZIncrXX(ctx context.Context, key string, member *Z) *FloatCmd { - const n = 4 - a := make([]interface{}, n+2) - a[0], a[1], a[2], a[3] = "zadd", key, "incr", "xx" - return c.zIncr(ctx, a, n, member) -} - -func (c cmdable) ZCard(ctx context.Context, key string) *IntCmd { - cmd := NewIntCmd(ctx, "zcard", key) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZCount(ctx context.Context, key, min, max string) *IntCmd { - cmd := NewIntCmd(ctx, "zcount", key, min, max) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZLexCount(ctx context.Context, key, min, max string) *IntCmd { - cmd := NewIntCmd(ctx, "zlexcount", key, min, max) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZIncrBy(ctx context.Context, key string, increment float64, member string) *FloatCmd { - cmd := NewFloatCmd(ctx, "zincrby", key, increment, member) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZInterStore(ctx context.Context, destination string, store *ZStore) *IntCmd { - args := make([]interface{}, 3+len(store.Keys)) - args[0] = "zinterstore" - args[1] = destination - args[2] = len(store.Keys) - for i, key := range store.Keys { - args[3+i] = key - } - if len(store.Weights) > 0 { - args = append(args, "weights") - for _, weight := range store.Weights { - args = append(args, weight) - } - } - if store.Aggregate != "" { - args = append(args, "aggregate", store.Aggregate) - } - cmd := NewIntCmd(ctx, args...) - cmd.setFirstKeyPos(3) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZPopMax(ctx context.Context, key string, count ...int64) *ZSliceCmd { - args := []interface{}{ - "zpopmax", - key, - } - - switch len(count) { - case 0: - break - case 1: - args = append(args, count[0]) - default: - panic("too many arguments") - } - - cmd := NewZSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZPopMin(ctx context.Context, key string, count ...int64) *ZSliceCmd { - args := []interface{}{ - "zpopmin", - key, - } - - switch len(count) { - case 0: - break - case 1: - args = append(args, count[0]) - default: - panic("too many arguments") - } - - cmd := NewZSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) zRange(ctx context.Context, key string, start, stop int64, withScores bool) *StringSliceCmd { - args := []interface{}{ - "zrange", - key, - start, - stop, - } - if withScores { - args = append(args, "withscores") - } - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd { - return c.zRange(ctx, key, start, stop, false) -} - -func (c cmdable) ZRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd { - cmd := NewZSliceCmd(ctx, "zrange", key, start, stop, "withscores") - _ = c(ctx, cmd) - return cmd -} - -type ZRangeBy struct { - Min, Max string - Offset, Count int64 -} - -func (c cmdable) zRangeBy(ctx context.Context, zcmd, key string, opt *ZRangeBy, withScores bool) *StringSliceCmd { - args := []interface{}{zcmd, key, opt.Min, opt.Max} - if withScores { - args = append(args, "withscores") - } - if opt.Offset != 0 || opt.Count != 0 { - args = append( - args, - "limit", - opt.Offset, - opt.Count, - ) - } - cmd := NewStringSliceCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd { - return c.zRangeBy(ctx, "zrangebyscore", key, opt, false) -} - -func (c cmdable) ZRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd { - return c.zRangeBy(ctx, "zrangebylex", key, opt, false) -} - -func (c cmdable) ZRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd { - args := []interface{}{"zrangebyscore", key, opt.Min, opt.Max, "withscores"} - if opt.Offset != 0 || opt.Count != 0 { - args = append( - args, - "limit", - opt.Offset, - opt.Count, - ) - } - cmd := NewZSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRank(ctx context.Context, key, member string) *IntCmd { - cmd := NewIntCmd(ctx, "zrank", key, member) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRem(ctx context.Context, key string, members ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(members)) - args[0] = "zrem" - args[1] = key - args = appendArgs(args, members) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRemRangeByRank(ctx context.Context, key string, start, stop int64) *IntCmd { - cmd := NewIntCmd( - ctx, - "zremrangebyrank", - key, - start, - stop, - ) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRemRangeByScore(ctx context.Context, key, min, max string) *IntCmd { - cmd := NewIntCmd(ctx, "zremrangebyscore", key, min, max) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRemRangeByLex(ctx context.Context, key, min, max string) *IntCmd { - cmd := NewIntCmd(ctx, "zremrangebylex", key, min, max) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRevRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd { - cmd := NewStringSliceCmd(ctx, "zrevrange", key, start, stop) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRevRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd { - cmd := NewZSliceCmd(ctx, "zrevrange", key, start, stop, "withscores") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) zRevRangeBy(ctx context.Context, zcmd, key string, opt *ZRangeBy) *StringSliceCmd { - args := []interface{}{zcmd, key, opt.Max, opt.Min} - if opt.Offset != 0 || opt.Count != 0 { - args = append( - args, - "limit", - opt.Offset, - opt.Count, - ) - } - cmd := NewStringSliceCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRevRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd { - return c.zRevRangeBy(ctx, "zrevrangebyscore", key, opt) -} - -func (c cmdable) ZRevRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd { - return c.zRevRangeBy(ctx, "zrevrangebylex", key, opt) -} - -func (c cmdable) ZRevRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd { - args := []interface{}{"zrevrangebyscore", key, opt.Max, opt.Min, "withscores"} - if opt.Offset != 0 || opt.Count != 0 { - args = append( - args, - "limit", - opt.Offset, - opt.Count, - ) - } - cmd := NewZSliceCmd(ctx, args...) 
- _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZRevRank(ctx context.Context, key, member string) *IntCmd { - cmd := NewIntCmd(ctx, "zrevrank", key, member) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZScore(ctx context.Context, key, member string) *FloatCmd { - cmd := NewFloatCmd(ctx, "zscore", key, member) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ZUnionStore(ctx context.Context, dest string, store *ZStore) *IntCmd { - args := make([]interface{}, 3+len(store.Keys)) - args[0] = "zunionstore" - args[1] = dest - args[2] = len(store.Keys) - for i, key := range store.Keys { - args[3+i] = key - } - if len(store.Weights) > 0 { - args = append(args, "weights") - for _, weight := range store.Weights { - args = append(args, weight) - } - } - if store.Aggregate != "" { - args = append(args, "aggregate", store.Aggregate) - } - - cmd := NewIntCmd(ctx, args...) - cmd.setFirstKeyPos(3) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) PFAdd(ctx context.Context, key string, els ...interface{}) *IntCmd { - args := make([]interface{}, 2, 2+len(els)) - args[0] = "pfadd" - args[1] = key - args = appendArgs(args, els) - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) PFCount(ctx context.Context, keys ...string) *IntCmd { - args := make([]interface{}, 1+len(keys)) - args[0] = "pfcount" - for i, key := range keys { - args[1+i] = key - } - cmd := NewIntCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) PFMerge(ctx context.Context, dest string, keys ...string) *StatusCmd { - args := make([]interface{}, 2+len(keys)) - args[0] = "pfmerge" - args[1] = dest - for i, key := range keys { - args[2+i] = key - } - cmd := NewStatusCmd(ctx, args...) - _ = c(ctx, cmd) - return cmd -} - -//------------------------------------------------------------------------------ - -func (c cmdable) BgRewriteAOF(ctx context.Context) *StatusCmd { - cmd := NewStatusCmd(ctx, "bgrewriteaof") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) BgSave(ctx context.Context) *StatusCmd { - cmd := NewStatusCmd(ctx, "bgsave") - _ = c(ctx, cmd) - return cmd -} - -func (c cmdable) ClientKill(ctx context.Context, ipPort string) *StatusCmd { - cmd := NewStatusCmd(ctx, "client", "kill", ipPort) - _ = c(ctx, cmd) - return cmd -} - -// ClientKillByFilter is new style syntax, while the ClientKill is old -// -// CLIENT KILL