commit 8ffb773f43c8dc54801ca1d111854e7e881c93c9 Author: Frédéric Guillot Date: Sun Nov 19 21:10:04 2017 -0800 First commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..8ad38a5b --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +miniflux-linux-amd64 +miniflux-darwin-amd64 diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..ccc3a6c8 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,5 @@ +language: go +go: + - 1.9 +script: + - go test -cover -race ./... diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000..b71308fc --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,81 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + name = "github.com/PuerkitoBio/goquery" + packages = ["."] + revision = "e1271ee34c6a305e38566ecd27ae374944907ee9" + version = "v1.1.0" + +[[projects]] + branch = "master" + name = "github.com/andybalholm/cascadia" + packages = ["."] + revision = "349dd0209470eabd9514242c688c403c0926d266" + +[[projects]] + name = "github.com/gorilla/context" + packages = ["."] + revision = "1ea25387ff6f684839d82767c1733ff4d4d15d0a" + version = "v1.1" + +[[projects]] + name = "github.com/gorilla/mux" + packages = ["."] + revision = "7f08801859139f86dfafd1c296e2cba9a80d292e" + version = "v1.6.0" + +[[projects]] + branch = "master" + name = "github.com/lib/pq" + packages = [".","oid"] + revision = "8c6ee72f3e6bcb1542298dd5f76cb74af9742cec" + +[[projects]] + name = "github.com/tdewolff/minify" + packages = [".","css","js"] + revision = "90df1aae5028a7cbb441bde86e86a55df6b5aa34" + version = "v2.3.3" + +[[projects]] + name = "github.com/tdewolff/parse" + packages = [".","buffer","css","js","strconv"] + revision = "bace4cf682c41e03b154044b561575ff541b83e8" + version = "v2.3.1" + +[[projects]] + branch = "master" + name = "github.com/tomasen/realip" + packages = ["."] + revision = "15489afd3be348430f5f67467d2bb6b2f9b757ed" + +[[projects]] + branch = "master" + name = "golang.org/x/crypto" + packages = ["bcrypt","blowfish","ssh/terminal"] + revision = "9f005a07e0d31d45e6656d241bb5c0f2efd4bc94" + +[[projects]] + branch = "master" + name = "golang.org/x/net" + packages = ["html","html/atom","html/charset"] + revision = "9dfe39835686865bff950a07b394c12a98ddc811" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix","windows"] + revision = "0dd5e194bbf5eb84a39666eb4c98a4d007e4203a" + +[[projects]] + branch = "master" + name = "golang.org/x/text" + packages = ["encoding","encoding/charmap","encoding/htmlindex","encoding/internal","encoding/internal/identifier","encoding/japanese","encoding/korean","encoding/simplifiedchinese","encoding/traditionalchinese","encoding/unicode","internal/gen","internal/tag","internal/utf8internal","language","runes","transform","unicode/cldr"] + revision = "88f656faf3f37f690df1a32515b479415e1a6769" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "27a0ca12f5a709bb76b9c90f6720b6824ac8fc81b2fc66f059f212366443ff5d" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml new file mode 100644 index 00000000..b4233cf3 --- /dev/null +++ b/Gopkg.toml @@ -0,0 +1,54 @@ + +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/PuerkitoBio/goquery" + version = "1.1.0" + +[[constraint]] + name = "github.com/gorilla/mux" + version = "1.6.0" + +[[constraint]] + branch = "master" + name = "github.com/lib/pq" + +[[constraint]] + branch = "master" + name = "github.com/rvflash/elapsed" + +[[constraint]] + name = "github.com/tdewolff/minify" + version = "2.3.3" + +[[constraint]] + branch = "master" + name = "github.com/tomasen/realip" + +[[constraint]] + branch = "master" + name = "golang.org/x/crypto" + +[[constraint]] + branch = "master" + name = "golang.org/x/net" diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..f433b1a5 --- /dev/null +++ b/LICENSE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..88c4aea1 --- /dev/null +++ b/Makefile @@ -0,0 +1,25 @@ +APP = miniflux +VERSION = $(shell git rev-parse --short HEAD) +BUILD_DATE = `date +%FT%T%z` + +.PHONY: build-linux build-darwin build run clean test + +build-linux: + @ go generate + @ GOOS=linux GOARCH=amd64 go build -ldflags="-X 'miniflux/version.Version=$(VERSION)' -X 'miniflux/version.BuildDate=$(BUILD_DATE)'" -o $(APP)-linux-amd64 main.go + +build-darwin: + @ go generate + @ GOOS=darwin GOARCH=amd64 go build -ldflags="-X 'miniflux/version.Version=$(VERSION)' -X 'miniflux/version.BuildDate=$(BUILD_DATE)'" -o $(APP)-darwin-amd64 main.go + +build: build-linux build-darwin + +run: + @ go generate + @ go run main.go + +clean: + @ rm -f $(APP)-* + +test: + go test -cover -race ./... diff --git a/README.md b/README.md new file mode 100644 index 00000000..414d88de --- /dev/null +++ b/README.md @@ -0,0 +1,38 @@ +Miniflux 2 +========== +[![Build Status](https://travis-ci.org/miniflux/miniflux2.svg?branch=master)](https://travis-ci.org/miniflux/miniflux2) + +Miniflux is a minimalist and opinionated feed reader: + +- Written in Go (Golang) +- Works only with Postgresql +- Doesn't use any ORM +- Doesn't use any complicated framework +- The number of features is volountary limited + +It's simple, fast, lightweight and super easy to install. + +Miniflux 2 is a rewrite of Miniflux 1.x in Golang. + +Notes +----- + +Miniflux 2 still in development and **it's not ready to use**. + +TODO +---- + +- [ ] Custom entries sorting +- [ ] Webpage scraper (Readability) +- [ ] Bookmarklet +- [ ] External integrations (Pinboard, Wallabag...) +- [ ] Gzip compression +- [ ] Integration tests +- [ ] Flush history +- [ ] OAuth2 + +Credits +------- + +- Author: Frédéric Guillot +- Distributed under Apache 2.0 License diff --git a/config/config.go b/config/config.go new file mode 100644 index 00000000..42fb2090 --- /dev/null +++ b/config/config.go @@ -0,0 +1,36 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. 
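The Makefile stamps release builds through -ldflags, overwriting miniflux/version.Version and miniflux/version.BuildDate, and main.go further down prints both values for the -info flag. The version package itself is not part of this excerpt; for those -X flags to bind, it only needs two package-level string variables, roughly like this sketch (the "unknown" defaults are an assumption):

package version

// Version is replaced at build time via
// -ldflags "-X 'miniflux/version.Version=<git short hash>'".
var Version = "unknown"

// BuildDate is replaced at build time via
// -ldflags "-X 'miniflux/version.BuildDate=<date>'".
var BuildDate = "unknown"
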
+// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package config + +import ( + "os" + "strconv" +) + +type Config struct { +} + +func (c *Config) Get(key, fallback string) string { + value := os.Getenv(key) + if value == "" { + return fallback + } + + return value +} + +func (c *Config) GetInt(key string, fallback int) int { + value := os.Getenv(key) + if value == "" { + return fallback + } + + v, _ := strconv.Atoi(value) + return v +} + +func NewConfig() *Config { + return &Config{} +} diff --git a/errors/errors.go b/errors/errors.go new file mode 100644 index 00000000..e40b3c23 --- /dev/null +++ b/errors/errors.go @@ -0,0 +1,27 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package errors + +import ( + "fmt" + "github.com/miniflux/miniflux2/locale" +) + +type LocalizedError struct { + message string + args []interface{} +} + +func (l LocalizedError) Error() string { + return fmt.Sprintf(l.message, l.args...) +} + +func (l LocalizedError) Localize(translation *locale.Language) string { + return translation.Get(l.message, l.args...) +} + +func NewLocalizedError(message string, args ...interface{}) LocalizedError { + return LocalizedError{message: message, args: args} +} diff --git a/generate.go b/generate.go new file mode 100644 index 00000000..21ce370a --- /dev/null +++ b/generate.go @@ -0,0 +1,120 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +// +build ignore + +package main + +import ( + "crypto/sha256" + "encoding/base64" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "strings" + "text/template" + "time" + + "github.com/tdewolff/minify" + "github.com/tdewolff/minify/css" + "github.com/tdewolff/minify/js" +) + +const tpl = `// Code generated by go generate; DO NOT EDIT. 
+// {{ .Timestamp }} + +package {{ .Package }} + +var {{ .Map }} = map[string]string{ +{{ range $constant, $content := .Files }}` + "\t" + `"{{ $constant }}": ` + "`{{ $content }}`" + `, +{{ end }}} + +var {{ .Map }}Checksums = map[string]string{ +{{ range $constant, $content := .Checksums }}` + "\t" + `"{{ $constant }}": "{{ $content }}", +{{ end }}} +` + +var generatedTpl = template.Must(template.New("").Parse(tpl)) + +type GeneratedFile struct { + Package, Map string + Timestamp time.Time + Files map[string]string + Checksums map[string]string +} + +func normalizeBasename(filename string) string { + filename = strings.TrimSuffix(filename, filepath.Ext(filename)) + return strings.Replace(filename, " ", "_", -1) +} + +func generateFile(serializer, pkg, mapName, pattern, output string) { + generatedFile := &GeneratedFile{ + Package: pkg, + Map: mapName, + Timestamp: time.Now(), + Files: make(map[string]string), + Checksums: make(map[string]string), + } + + files, _ := filepath.Glob(pattern) + for _, file := range files { + basename := path.Base(file) + content, err := ioutil.ReadFile(file) + if err != nil { + panic(err) + } + + switch serializer { + case "css": + m := minify.New() + m.AddFunc("text/css", css.Minify) + content, err = m.Bytes("text/css", content) + if err != nil { + panic(err) + } + + basename = normalizeBasename(basename) + generatedFile.Files[basename] = string(content) + case "js": + m := minify.New() + m.AddFunc("text/javascript", js.Minify) + content, err = m.Bytes("text/javascript", content) + if err != nil { + panic(err) + } + + basename = normalizeBasename(basename) + generatedFile.Files[basename] = string(content) + case "base64": + encodedContent := base64.StdEncoding.EncodeToString(content) + generatedFile.Files[basename] = encodedContent + default: + basename = normalizeBasename(basename) + generatedFile.Files[basename] = string(content) + } + + generatedFile.Checksums[basename] = fmt.Sprintf("%x", sha256.Sum256(content)) + } + + f, err := os.Create(output) + if err != nil { + panic(err) + } + defer f.Close() + + generatedTpl.Execute(f, generatedFile) +} + +func main() { + generateFile("none", "sql", "SqlMap", "sql/*.sql", "sql/sql.go") + generateFile("base64", "static", "Binaries", "server/static/bin/*", "server/static/bin.go") + generateFile("css", "static", "Stylesheets", "server/static/css/*.css", "server/static/css.go") + generateFile("js", "static", "Javascript", "server/static/js/*.js", "server/static/js.go") + generateFile("none", "template", "templateViewsMap", "server/template/html/*.html", "server/template/views.go") + generateFile("none", "template", "templateCommonMap", "server/template/html/common/*.html", "server/template/common.go") + generateFile("none", "locale", "Translations", "locale/translations/*.json", "locale/translations.go") +} diff --git a/helper/crypto.go b/helper/crypto.go new file mode 100644 index 00000000..ed18bb68 --- /dev/null +++ b/helper/crypto.go @@ -0,0 +1,38 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package helper + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/base64" + "fmt" +) + +// HashFromBytes returns a SHA-256 checksum of the input. +func HashFromBytes(value []byte) string { + sum := sha256.Sum256(value) + return fmt.Sprintf("%x", sum) +} + +// Hash returns a SHA-256 checksum of a string. 
+func Hash(value string) string { + return HashFromBytes([]byte(value)) +} + +// GenerateRandomBytes returns random bytes. +func GenerateRandomBytes(size int) []byte { + b := make([]byte, size) + if _, err := rand.Read(b); err != nil { + panic(fmt.Errorf("Unable to generate random string: %v", err)) + } + + return b +} + +// GenerateRandomString returns a random string. +func GenerateRandomString(size int) string { + return base64.URLEncoding.EncodeToString(GenerateRandomBytes(size)) +} diff --git a/helper/time.go b/helper/time.go new file mode 100644 index 00000000..57d2ba49 --- /dev/null +++ b/helper/time.go @@ -0,0 +1,16 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package helper + +import ( + "log" + "time" +) + +// ExecutionTime returns the elapsed time of a block of code. +func ExecutionTime(start time.Time, name string) { + elapsed := time.Since(start) + log.Printf("%s took %s", name, elapsed) +} diff --git a/locale/language.go b/locale/language.go new file mode 100644 index 00000000..c3deda33 --- /dev/null +++ b/locale/language.go @@ -0,0 +1,47 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package locale + +import "fmt" + +type Language struct { + language string + translations Translation +} + +func (l *Language) Get(key string, args ...interface{}) string { + var translation string + + str, found := l.translations[key] + if !found { + translation = key + } else { + translation = str.(string) + } + + return fmt.Sprintf(translation, args...) +} + +func (l *Language) Plural(key string, n int, args ...interface{}) string { + translation := key + slices, found := l.translations[key] + if found { + + pluralForm, found := pluralForms[l.language] + if !found { + pluralForm = pluralForms["default"] + } + + index := pluralForm(n) + translations := slices.([]interface{}) + translation = key + + if len(translations) > index { + translation = translations[index].(string) + } + } + + return fmt.Sprintf(translation, args...) +} diff --git a/locale/locale.go b/locale/locale.go new file mode 100644 index 00000000..49005259 --- /dev/null +++ b/locale/locale.go @@ -0,0 +1,30 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package locale + +import "log" + +type Translation map[string]interface{} + +type Locales map[string]Translation + +func Load() *Translator { + translator := NewTranslator() + + for language, translations := range Translations { + log.Println("Loading translation:", language) + translator.AddLanguage(language, translations) + } + + return translator +} + +// GetAvailableLanguages returns the list of available languages. +func GetAvailableLanguages() map[string]string { + return map[string]string{ + "en_US": "English", + "fr_FR": "Français", + } +} diff --git a/locale/locale_test.go b/locale/locale_test.go new file mode 100644 index 00000000..baddd1e1 --- /dev/null +++ b/locale/locale_test.go @@ -0,0 +1,103 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
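Before the tests, it is worth sketching how the locale pieces fit together at runtime: Load registers every generated translation into a Translator, and callers resolve keys through a Language, falling back to the key itself when nothing matches. A minimal usage sketch (the printed values assume the fr_FR strings shipped in this commit):

package main

import (
    "fmt"

    "github.com/miniflux/miniflux2/locale"
)

func main() {
    // Load registers every embedded translation (en_US, fr_FR) into a Translator.
    translator := locale.Load()

    // GetLanguage never returns nil; unknown languages and unknown keys
    // simply fall back to the key passed to Get.
    language := translator.GetLanguage("fr_FR")
    fmt.Println(language.Get("Unread"))                           // "Non lus"
    fmt.Println(language.Plural("plural.feed.error_count", 2, 2)) // "2 erreurs"
}

Since fr_FR has no entry in pluralForms, the "default" rule (index 1 for n != 1) picks the second plural form here.
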
+package locale + +import "testing" + +func TestTranslateWithMissingLanguage(t *testing.T) { + translator := NewTranslator() + translation := translator.GetLanguage("en_US").Get("auth.username") + + if translation != "auth.username" { + t.Errorf("Wrong translation, got %s", translation) + } +} + +func TestTranslateWithExistingKey(t *testing.T) { + data := `{"auth.username": "Username"}` + translator := NewTranslator() + translator.AddLanguage("en_US", data) + translation := translator.GetLanguage("en_US").Get("auth.username") + + if translation != "Username" { + t.Errorf("Wrong translation, got %s", translation) + } +} + +func TestTranslateWithMissingKey(t *testing.T) { + data := `{"auth.username": "Username"}` + translator := NewTranslator() + translator.AddLanguage("en_US", data) + translation := translator.GetLanguage("en_US").Get("auth.password") + + if translation != "auth.password" { + t.Errorf("Wrong translation, got %s", translation) + } +} + +func TestTranslateWithMissingKeyAndPlaceholder(t *testing.T) { + translator := NewTranslator() + translator.AddLanguage("fr_FR", "") + translation := translator.GetLanguage("fr_FR").Get("Status: %s", "ok") + + if translation != "Status: ok" { + t.Errorf("Wrong translation, got %s", translation) + } +} + +func TestTranslatePluralWithDefaultRule(t *testing.T) { + data := `{"number_of_users": ["Il y a %d utilisateur (%s)", "Il y a %d utilisateurs (%s)"]}` + translator := NewTranslator() + translator.AddLanguage("fr_FR", data) + language := translator.GetLanguage("fr_FR") + + translation := language.Plural("number_of_users", 1, 1, "some text") + expected := "Il y a 1 utilisateur (some text)" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } + + translation = language.Plural("number_of_users", 2, 2, "some text") + expected = "Il y a 2 utilisateurs (some text)" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } +} + +func TestTranslatePluralWithRussianRule(t *testing.T) { + data := `{"key": ["из %d книги за %d день", "из %d книг за %d дня", "из %d книг за %d дней"]}` + translator := NewTranslator() + translator.AddLanguage("ru_RU", data) + language := translator.GetLanguage("ru_RU") + + translation := language.Plural("key", 1, 1, 1) + expected := "из 1 книги за 1 день" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } + + translation = language.Plural("key", 2, 2, 2) + expected = "из 2 книг за 2 дня" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } + + translation = language.Plural("key", 5, 5, 5) + expected = "из 5 книг за 5 дней" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } +} + +func TestTranslatePluralWithMissingTranslation(t *testing.T) { + translator := NewTranslator() + translator.AddLanguage("fr_FR", "") + language := translator.GetLanguage("fr_FR") + + translation := language.Plural("number_of_users", 2) + expected := "number_of_users" + if translation != expected { + t.Errorf(`Wrong translation, got "%s" instead of "%s"`, translation, expected) + } +} diff --git a/locale/plurals.go b/locale/plurals.go new file mode 100644 index 00000000..d94f238a --- /dev/null +++ b/locale/plurals.go @@ -0,0 +1,101 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. 
+// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package locale + +// See https://localization-guide.readthedocs.io/en/latest/l10n/pluralforms.html +// And http://www.unicode.org/cldr/charts/29/supplemental/language_plural_rules.html +var pluralForms = map[string]func(n int) int{ + // nplurals=2; plural=(n != 1); + "default": func(n int) int { + if n != 1 { + return 1 + } + + return 0 + }, + // nplurals=6; plural=(n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 ? 4 : 5); + "ar_AR": func(n int) int { + if n == 0 { + return 0 + } + + if n == 1 { + return 1 + } + + if n == 2 { + return 2 + } + + if n%100 >= 3 && n%100 <= 10 { + return 3 + } + + if n%100 >= 11 { + return 4 + } + + return 5 + }, + // nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2; + "cs_CZ": func(n int) int { + if n == 1 { + return 0 + } + + if n >= 2 && n <= 4 { + return 1 + } + + return 2 + }, + // nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2); + "pl_PL": func(n int) int { + if n == 1 { + return 0 + } + + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + + return 2 + }, + // nplurals=2; plural=(n > 1); + "pt_BR": func(n int) int { + if n > 1 { + return 1 + } + return 0 + }, + // nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2); + "ru_RU": func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + + return 2 + }, + // nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2); + "sr_RS": func(n int) int { + if n%10 == 1 && n%100 != 11 { + return 0 + } + + if n%10 >= 2 && n%10 <= 4 && (n%100 < 10 || n%100 >= 20) { + return 1 + } + + return 2 + }, + // nplurals=1; plural=0; + "zh_CN": func(n int) int { + return 0 + }, +} diff --git a/locale/translations.go b/locale/translations.go new file mode 100644 index 00000000..0aa1aa77 --- /dev/null +++ b/locale/translations.go @@ -0,0 +1,136 @@ +// Code generated by go generate; DO NOT EDIT. +// 2017-11-19 22:01:21.925268372 -0800 PST m=+0.006101515 + +package locale + +var Translations = map[string]string{ + "en_US": `{ + "plural.feed.error_count": [ + "%d error", + "%d errors" + ], + "plural.categories.feed_count": [ + "There is %d feed.", + "There are %d feeds." + ] +}`, + "fr_FR": `{ + "plural.feed.error_count": [ + "%d erreur", + "%d erreurs" + ], + "plural.categories.feed_count": [ + "Il y %d abonnement.", + "Il y %d abonnements." + ], + "Username": "Nom d'utilisateur", + "Password": "Mot de passe", + "Unread": "Non lus", + "History": "Historique", + "Feeds": "Abonnements", + "Categories": "Catégories", + "Settings": "Réglages", + "Logout": "Se déconnecter", + "Next": "Suivant", + "Previous": "Précédent", + "New Subscription": "Nouvel Abonnment", + "Import": "Importation", + "Export": "Exportation", + "There is no category. You must have at least one category.": "Il n'y a aucune catégorie. 
Vous devez avoir au moins une catégorie.", + "URL": "URL", + "Category": "Catégorie", + "Find a subscription": "Trouver un abonnement", + "Loading...": "Chargement...", + "Create a category": "Créer une catégorie", + "There is no category.": "Il n'y a aucune catégorie.", + "Edit": "Modifier", + "Remove": "Supprimer", + "No feed.": "Aucun abonnement.", + "There is no article in this category.": "Il n'y a aucun article dans cette catégorie.", + "Original": "Original", + "Mark this page as read": "Marquer cette page comme lu", + "not yet": "pas encore", + "just now": "à l'instant", + "1 minute ago": "il y a une minute", + "%d minutes ago": "il y a %d minutes", + "1 hour ago": "il y a une heure", + "%d hours ago": "il y a %d heures", + "yesterday": "hier", + "%d days ago": "il y a %d jours", + "%d weeks ago": "il y a %d semaines", + "%d months ago": "il y a %d mois", + "%d years ago": "il y a %d années", + "Date": "Date", + "IP Address": "Adresse IP", + "User Agent": "Navigateur Web", + "Actions": "Actions", + "Current session": "Session actuelle", + "Sessions": "Sessions", + "Users": "Utilisateurs", + "Add user": "Ajouter un utilisateur", + "Choose a Subscription": "Choisissez un abonnement", + "Subscribe": "S'abonner", + "New Category": "Nouvelle Catégorie", + "Title": "Titre", + "Save": "Sauvegarder", + "or": "ou", + "cancel": "annuler", + "New User": "Nouvel Utilisateur", + "Confirmation": "Confirmation", + "Administrator": "Administrateur", + "Edit Category: %s": "Modification de la catégorie : %s", + "Update": "Mettre à jour", + "Edit Feed: %s": "Modification de l'abonnement : %s", + "There is no category!": "Il n'y a aucune catégorie !", + "Edit user: %s": "Modification de l'utilisateur : %s", + "There is no article for this feed.": "Il n'y a aucun article pour cet abonnement.", + "Add subscription": "Ajouter un abonnement", + "You don't have any subscription.": "Vous n'avez aucun abonnement", + "Last check:": "Dernière vérification :", + "Refresh": "Actualiser", + "There is no history at the moment.": "Il n'y a aucun historique pour le moment.", + "OPML file": "Fichier OPML", + "Sign In": "Connexion", + "Sign in": "Connexion", + "Theme": "Thème", + "Timezone": "Fuseau horaire", + "Language": "Langue", + "There is no unread article.": "Il n'y a rien de nouveau à lire.", + "You are the only user.": "Vous êtes le seul utilisateur.", + "Last Login": "Dernière connexion", + "Yes": "Oui", + "No": "Non", + "This feed already exists (%s).": "Cet abonnement existe déjà (%s).", + "Unable to fetch feed (statusCode=%d).": "Impossible de récupérer cet abonnement (code=%d).", + "Unable to open this link: %v": "Impossible d'ouvrir ce lien : %v", + "Unable to analyze this page: %v": "Impossible d'analyzer cette page : %v", + "Unable to find any subscription.": "Impossible de trouver un abonnement.", + "The URL and the category are mandatory.": "L'URL et la catégorie sont obligatoire.", + "All fields are mandatory.": "Tous les champs sont obligatoire.", + "Passwords are not the same.": "Les mots de passe ne sont pas les mêmes.", + "You must use at least 6 characters.": "Vous devez utiliser au moins 6 caractères.", + "The username is mandatory.": "Le nom d'utilisateur est obligatoire.", + "The username, theme, language and timezone fields are mandatory.": "Le nom d'utilisateur, le thème, la langue et le fuseau horaire sont obligatoire.", + "The title is mandatory.": "Le titre est obligatoire.", + "About": "A propos", + "version": "Version", + "Version:": "Version :", + "Build Date:": "Date de la 
compilation :", + "Author:": "Auteur :", + "Authors": "Auteurs", + "License:": "Licence :", + "Attachments": "Pièces jointes", + "Download": "Télécharger", + "Invalid username or password.": "Mauvais identifiant ou mot de passe.", + "Never": "Jamais", + "Unable to execute request: %v": "Impossible d'exécuter cette requête: %v", + "Last Parsing Error": "Dernière erreur d'analyse", + "There is a problem with this feed": "Il y a un problème avec cet abonnement" +} +`, +} + +var TranslationsChecksums = map[string]string{ + "en_US": "6fe95384260941e8a5a3c695a655a932e0a8a6a572c1e45cb2b1ae8baa01b897", + "fr_FR": "1f75e5a4b581755f7f84687126bc5b96aaf0109a2f83a72a8770c2ad3ddb7ba3", +} diff --git a/locale/translations/en_US.json b/locale/translations/en_US.json new file mode 100644 index 00000000..0ec7b26a --- /dev/null +++ b/locale/translations/en_US.json @@ -0,0 +1,10 @@ +{ + "plural.feed.error_count": [ + "%d error", + "%d errors" + ], + "plural.categories.feed_count": [ + "There is %d feed.", + "There are %d feeds." + ] +} \ No newline at end of file diff --git a/locale/translations/fr_FR.json b/locale/translations/fr_FR.json new file mode 100644 index 00000000..7699bb70 --- /dev/null +++ b/locale/translations/fr_FR.json @@ -0,0 +1,113 @@ +{ + "plural.feed.error_count": [ + "%d erreur", + "%d erreurs" + ], + "plural.categories.feed_count": [ + "Il y %d abonnement.", + "Il y %d abonnements." + ], + "Username": "Nom d'utilisateur", + "Password": "Mot de passe", + "Unread": "Non lus", + "History": "Historique", + "Feeds": "Abonnements", + "Categories": "Catégories", + "Settings": "Réglages", + "Logout": "Se déconnecter", + "Next": "Suivant", + "Previous": "Précédent", + "New Subscription": "Nouvel Abonnment", + "Import": "Importation", + "Export": "Exportation", + "There is no category. You must have at least one category.": "Il n'y a aucune catégorie. 
Vous devez avoir au moins une catégorie.", + "URL": "URL", + "Category": "Catégorie", + "Find a subscription": "Trouver un abonnement", + "Loading...": "Chargement...", + "Create a category": "Créer une catégorie", + "There is no category.": "Il n'y a aucune catégorie.", + "Edit": "Modifier", + "Remove": "Supprimer", + "No feed.": "Aucun abonnement.", + "There is no article in this category.": "Il n'y a aucun article dans cette catégorie.", + "Original": "Original", + "Mark this page as read": "Marquer cette page comme lu", + "not yet": "pas encore", + "just now": "à l'instant", + "1 minute ago": "il y a une minute", + "%d minutes ago": "il y a %d minutes", + "1 hour ago": "il y a une heure", + "%d hours ago": "il y a %d heures", + "yesterday": "hier", + "%d days ago": "il y a %d jours", + "%d weeks ago": "il y a %d semaines", + "%d months ago": "il y a %d mois", + "%d years ago": "il y a %d années", + "Date": "Date", + "IP Address": "Adresse IP", + "User Agent": "Navigateur Web", + "Actions": "Actions", + "Current session": "Session actuelle", + "Sessions": "Sessions", + "Users": "Utilisateurs", + "Add user": "Ajouter un utilisateur", + "Choose a Subscription": "Choisissez un abonnement", + "Subscribe": "S'abonner", + "New Category": "Nouvelle Catégorie", + "Title": "Titre", + "Save": "Sauvegarder", + "or": "ou", + "cancel": "annuler", + "New User": "Nouvel Utilisateur", + "Confirmation": "Confirmation", + "Administrator": "Administrateur", + "Edit Category: %s": "Modification de la catégorie : %s", + "Update": "Mettre à jour", + "Edit Feed: %s": "Modification de l'abonnement : %s", + "There is no category!": "Il n'y a aucune catégorie !", + "Edit user: %s": "Modification de l'utilisateur : %s", + "There is no article for this feed.": "Il n'y a aucun article pour cet abonnement.", + "Add subscription": "Ajouter un abonnement", + "You don't have any subscription.": "Vous n'avez aucun abonnement", + "Last check:": "Dernière vérification :", + "Refresh": "Actualiser", + "There is no history at the moment.": "Il n'y a aucun historique pour le moment.", + "OPML file": "Fichier OPML", + "Sign In": "Connexion", + "Sign in": "Connexion", + "Theme": "Thème", + "Timezone": "Fuseau horaire", + "Language": "Langue", + "There is no unread article.": "Il n'y a rien de nouveau à lire.", + "You are the only user.": "Vous êtes le seul utilisateur.", + "Last Login": "Dernière connexion", + "Yes": "Oui", + "No": "Non", + "This feed already exists (%s).": "Cet abonnement existe déjà (%s).", + "Unable to fetch feed (statusCode=%d).": "Impossible de récupérer cet abonnement (code=%d).", + "Unable to open this link: %v": "Impossible d'ouvrir ce lien : %v", + "Unable to analyze this page: %v": "Impossible d'analyzer cette page : %v", + "Unable to find any subscription.": "Impossible de trouver un abonnement.", + "The URL and the category are mandatory.": "L'URL et la catégorie sont obligatoire.", + "All fields are mandatory.": "Tous les champs sont obligatoire.", + "Passwords are not the same.": "Les mots de passe ne sont pas les mêmes.", + "You must use at least 6 characters.": "Vous devez utiliser au moins 6 caractères.", + "The username is mandatory.": "Le nom d'utilisateur est obligatoire.", + "The username, theme, language and timezone fields are mandatory.": "Le nom d'utilisateur, le thème, la langue et le fuseau horaire sont obligatoire.", + "The title is mandatory.": "Le titre est obligatoire.", + "About": "A propos", + "version": "Version", + "Version:": "Version :", + "Build Date:": "Date de la 
compilation :", + "Author:": "Auteur :", + "Authors": "Auteurs", + "License:": "Licence :", + "Attachments": "Pièces jointes", + "Download": "Télécharger", + "Invalid username or password.": "Mauvais identifiant ou mot de passe.", + "Never": "Jamais", + "Unable to execute request: %v": "Impossible d'exécuter cette requête: %v", + "Last Parsing Error": "Dernière erreur d'analyse", + "There is a problem with this feed": "Il y a un problème avec cet abonnement" +} diff --git a/locale/translator.go b/locale/translator.go new file mode 100644 index 00000000..5560dd6c --- /dev/null +++ b/locale/translator.go @@ -0,0 +1,40 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package locale + +import ( + "encoding/json" + "fmt" + "strings" +) + +type Translator struct { + Locales Locales +} + +func (t *Translator) AddLanguage(language, translations string) error { + var decodedTranslations Translation + + decoder := json.NewDecoder(strings.NewReader(translations)) + if err := decoder.Decode(&decodedTranslations); err != nil { + return fmt.Errorf("Invalid JSON file: %v", err) + } + + t.Locales[language] = decodedTranslations + return nil +} + +func (t *Translator) GetLanguage(language string) *Language { + translations, found := t.Locales[language] + if !found { + return &Language{language: language} + } + + return &Language{language: language, translations: translations} +} + +func NewTranslator() *Translator { + return &Translator{Locales: make(Locales)} +} diff --git a/main.go b/main.go new file mode 100644 index 00000000..dedbb3f1 --- /dev/null +++ b/main.go @@ -0,0 +1,124 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package main + +//go:generate go run generate.go + +import ( + "bufio" + "context" + "flag" + "fmt" + "github.com/miniflux/miniflux2/config" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed" + "github.com/miniflux/miniflux2/scheduler" + "github.com/miniflux/miniflux2/server" + "github.com/miniflux/miniflux2/storage" + "github.com/miniflux/miniflux2/version" + "log" + "os" + "os/signal" + "runtime" + "strings" + "time" + + _ "github.com/lib/pq" + "golang.org/x/crypto/ssh/terminal" +) + +func run(cfg *config.Config, store *storage.Storage) { + log.Println("Starting Miniflux...") + + stop := make(chan os.Signal, 1) + signal.Notify(stop, os.Interrupt) + + feedHandler := feed.NewFeedHandler(store) + server := server.NewServer(cfg, store, feedHandler) + + go func() { + pool := scheduler.NewWorkerPool(feedHandler, cfg.GetInt("WORKER_POOL_SIZE", 5)) + scheduler.NewScheduler(store, pool, cfg.GetInt("POLLING_FREQUENCY", 30), cfg.GetInt("BATCH_SIZE", 10)) + }() + + <-stop + log.Println("Shutting down the server...") + ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) + server.Shutdown(ctx) + store.Close() + log.Println("Server gracefully stopped") +} + +func askCredentials() (string, string) { + reader := bufio.NewReader(os.Stdin) + + fmt.Print("Enter Username: ") + username, _ := reader.ReadString('\n') + + fmt.Print("Enter Password: ") + bytePassword, _ := terminal.ReadPassword(0) + + fmt.Printf("\n") + return strings.TrimSpace(username), strings.TrimSpace(string(bytePassword)) +} + +func main() { + flagInfo := flag.Bool("info", false, "Show application information") + flagVersion := flag.Bool("version", false, "Show application version") + flagMigrate := flag.Bool("migrate", false, "Migrate database schema") + flagFlushSessions := flag.Bool("flush-sessions", false, "Flush all sessions (disconnect users)") + flagCreateAdmin := flag.Bool("create-admin", false, "Create admin user") + flag.Parse() + + cfg := config.NewConfig() + store := storage.NewStorage( + cfg.Get("DATABASE_URL", "postgres://postgres:postgres@localhost/miniflux2?sslmode=disable"), + cfg.GetInt("DATABASE_MAX_CONNS", 20), + ) + + if *flagInfo { + fmt.Println("Version:", version.Version) + fmt.Println("Build Date:", version.BuildDate) + fmt.Println("Go Version:", runtime.Version()) + return + } + + if *flagVersion { + fmt.Println(version.Version) + return + } + + if *flagMigrate { + store.Migrate() + return + } + + if *flagFlushSessions { + fmt.Println("Flushing all sessions (disconnect users)") + if err := store.FlushAllSessions(); err != nil { + fmt.Println(err) + os.Exit(1) + } + return + } + + if *flagCreateAdmin { + user := &model.User{IsAdmin: true} + user.Username, user.Password = askCredentials() + if err := user.ValidateUserCreation(); err != nil { + fmt.Println(err) + os.Exit(1) + } + + if err := store.CreateUser(user); err != nil { + fmt.Println(err) + os.Exit(1) + } + + return + } + + run(cfg, store) +} diff --git a/model/category.go b/model/category.go new file mode 100644 index 00000000..76f254fb --- /dev/null +++ b/model/category.go @@ -0,0 +1,51 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
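Everything in main.go is configured through the config package shown earlier: each setting is an environment variable with a fallback default. The lookup semantics are worth spelling out, since GetInt silently returns 0 when a value does not parse. A small sketch (variable names mirror the ones used above):

package main

import (
    "fmt"
    "os"

    "github.com/miniflux/miniflux2/config"
)

func main() {
    cfg := config.NewConfig()

    // Unset variables fall back to the provided default.
    fmt.Println(cfg.GetInt("WORKER_POOL_SIZE", 5)) // 5
    fmt.Println(cfg.Get("DATABASE_URL", "postgres://postgres:postgres@localhost/miniflux2?sslmode=disable"))

    // Set variables win over the default...
    os.Setenv("POLLING_FREQUENCY", "60")
    fmt.Println(cfg.GetInt("POLLING_FREQUENCY", 30)) // 60

    // ...but a non-numeric value yields 0, not the fallback.
    os.Setenv("BATCH_SIZE", "ten")
    fmt.Println(cfg.GetInt("BATCH_SIZE", 10)) // 0
}
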
+ +package model + +import ( + "errors" + "fmt" +) + +type Category struct { + ID int64 `json:"id,omitempty"` + Title string `json:"title,omitempty"` + UserID int64 `json:"user_id,omitempty"` + FeedCount int `json:"nb_feeds,omitempty"` +} + +func (c *Category) String() string { + return fmt.Sprintf("ID=%d, UserID=%d, Title=%s", c.ID, c.UserID, c.Title) +} + +func (c Category) ValidateCategoryCreation() error { + if c.Title == "" { + return errors.New("The title is mandatory") + } + + if c.UserID == 0 { + return errors.New("The userID is mandatory") + } + + return nil +} + +func (c Category) ValidateCategoryModification() error { + if c.Title == "" { + return errors.New("The title is mandatory") + } + + if c.UserID == 0 { + return errors.New("The userID is mandatory") + } + + if c.ID == 0 { + return errors.New("The ID is mandatory") + } + + return nil +} + +type Categories []*Category diff --git a/model/enclosure.go b/model/enclosure.go new file mode 100644 index 00000000..5a52f781 --- /dev/null +++ b/model/enclosure.go @@ -0,0 +1,18 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +// Enclosure represents an attachment. +type Enclosure struct { + ID int64 `json:"id"` + UserID int64 `json:"user_id"` + EntryID int64 `json:"entry_id"` + URL string `json:"url"` + MimeType string `json:"mime_type"` + Size int `json:"size"` +} + +// EnclosureList represents a list of attachments. +type EnclosureList []*Enclosure diff --git a/model/entry.go b/model/entry.go new file mode 100644 index 00000000..6858935b --- /dev/null +++ b/model/entry.go @@ -0,0 +1,71 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
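The model validators return plain errors and appear intended to be checked before anything reaches storage: creation needs a title and a user, modification additionally needs an ID. For example:

package main

import (
    "fmt"

    "github.com/miniflux/miniflux2/model"
)

func main() {
    category := model.Category{Title: "News", UserID: 1}

    if err := category.ValidateCategoryCreation(); err != nil {
        fmt.Println("cannot create category:", err)
        return
    }

    // Without an ID, modification is rejected.
    if err := category.ValidateCategoryModification(); err != nil {
        fmt.Println(err) // "The ID is mandatory"
    }
}
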
+ +package model + +import ( + "fmt" + "time" +) + +const ( + EntryStatusUnread = "unread" + EntryStatusRead = "read" + EntryStatusRemoved = "removed" + DefaultSortingOrder = "published_at" + DefaultSortingDirection = "desc" +) + +type Entry struct { + ID int64 `json:"id"` + UserID int64 `json:"user_id"` + FeedID int64 `json:"feed_id"` + Status string `json:"status"` + Hash string `json:"hash"` + Title string `json:"title"` + URL string `json:"url"` + Date time.Time `json:"published_at"` + Content string `json:"content"` + Author string `json:"author"` + Enclosures EnclosureList `json:"enclosures,omitempty"` + Feed *Feed `json:"feed,omitempty"` + Category *Category `json:"category,omitempty"` +} + +type Entries []*Entry + +func ValidateEntryStatus(status string) error { + switch status { + case EntryStatusRead, EntryStatusUnread, EntryStatusRemoved: + return nil + } + + return fmt.Errorf(`Invalid entry status, valid status values are: "%s", "%s" and "%s"`, EntryStatusRead, EntryStatusUnread, EntryStatusRemoved) +} + +func ValidateEntryOrder(order string) error { + switch order { + case "id", "status", "published_at", "category_title", "category_id": + return nil + } + + return fmt.Errorf(`Invalid entry order, valid order values are: "id", "status", "published_at", "category_title", "category_id"`) +} + +func ValidateDirection(direction string) error { + switch direction { + case "asc", "desc": + return nil + } + + return fmt.Errorf(`Invalid direction, valid direction values are: "asc" or "desc"`) +} + +func GetOppositeDirection(direction string) string { + if direction == "asc" { + return "desc" + } + + return "asc" +} diff --git a/model/feed.go b/model/feed.go new file mode 100644 index 00000000..17a9b258 --- /dev/null +++ b/model/feed.go @@ -0,0 +1,66 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
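ValidateEntryStatus, ValidateEntryOrder and ValidateDirection look like guards for user-supplied query parameters (status filter, sort column and direction), with GetOppositeDirection handy for building "reverse order" links. A short sketch of that kind of use (the checkSorting helper below is hypothetical):

package main

import (
    "fmt"

    "github.com/miniflux/miniflux2/model"
)

// checkSorting validates a user-supplied sort column and direction
// before they are used to build a query.
func checkSorting(order, direction string) error {
    if err := model.ValidateEntryOrder(order); err != nil {
        return err
    }
    return model.ValidateDirection(direction)
}

func main() {
    if err := checkSorting(model.DefaultSortingOrder, model.DefaultSortingDirection); err != nil {
        fmt.Println(err)
        return
    }

    if err := model.ValidateEntryStatus(model.EntryStatusUnread); err != nil {
        fmt.Println(err)
        return
    }

    // Toggle the direction for a "reverse order" link.
    fmt.Println(model.GetOppositeDirection(model.DefaultSortingDirection)) // "asc"
}
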
+ +package model + +import ( + "fmt" + "reflect" + "time" +) + +// Feed represents a feed in the database +type Feed struct { + ID int64 `json:"id"` + UserID int64 `json:"user_id"` + FeedURL string `json:"feed_url"` + SiteURL string `json:"site_url"` + Title string `json:"title"` + CheckedAt time.Time `json:"checked_at,omitempty"` + EtagHeader string `json:"etag_header,omitempty"` + LastModifiedHeader string `json:"last_modified_header,omitempty"` + ParsingErrorMsg string `json:"parsing_error_message,omitempty"` + ParsingErrorCount int `json:"parsing_error_count,omitempty"` + Category *Category `json:"category,omitempty"` + Entries Entries `json:"entries,omitempty"` + Icon *FeedIcon `json:"icon,omitempty"` +} + +func (f *Feed) String() string { + return fmt.Sprintf("ID=%d, UserID=%d, FeedURL=%s, SiteURL=%s, Title=%s, Category={%s}", + f.ID, + f.UserID, + f.FeedURL, + f.SiteURL, + f.Title, + f.Category, + ) +} + +// Merge combine src to the current struct +func (f *Feed) Merge(src *Feed) { + src.ID = f.ID + src.UserID = f.UserID + + new := reflect.ValueOf(src).Elem() + for i := 0; i < new.NumField(); i++ { + field := new.Field(i) + + switch field.Interface().(type) { + case int64: + value := field.Int() + if value != 0 { + reflect.ValueOf(f).Elem().Field(i).SetInt(value) + } + case string: + value := field.String() + if value != "" { + reflect.ValueOf(f).Elem().Field(i).SetString(value) + } + } + } +} + +// Feeds is a list of feed +type Feeds []*Feed diff --git a/model/icon.go b/model/icon.go new file mode 100644 index 00000000..7bf12bfe --- /dev/null +++ b/model/icon.go @@ -0,0 +1,19 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +// Icon represents a website icon (favicon) +type Icon struct { + ID int64 `json:"id"` + Hash string `json:"hash"` + MimeType string `json:"mime_type"` + Content []byte `json:"content"` +} + +// FeedIcon is a jonction table between feeds and icons +type FeedIcon struct { + FeedID int64 `json:"feed_id"` + IconID int64 `json:"icon_id"` +} diff --git a/model/job.go b/model/job.go new file mode 100644 index 00000000..ed3ef5ee --- /dev/null +++ b/model/job.go @@ -0,0 +1,10 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +type Job struct { + UserID int64 + FeedID int64 +} diff --git a/model/session.go b/model/session.go new file mode 100644 index 00000000..ba62d8af --- /dev/null +++ b/model/session.go @@ -0,0 +1,23 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +import "time" +import "fmt" + +type Session struct { + ID int64 + UserID int64 + Token string + CreatedAt time.Time + UserAgent string + IP string +} + +func (s *Session) String() string { + return fmt.Sprintf("ID=%d, UserID=%d, IP=%s", s.ID, s.UserID, s.IP) +} + +type Sessions []*Session diff --git a/model/theme.go b/model/theme.go new file mode 100644 index 00000000..bcbb23ae --- /dev/null +++ b/model/theme.go @@ -0,0 +1,13 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +// GetThemes returns the list of available themes. 
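Feed.Merge deserves a note: it first resets src's ID and UserID to the receiver's values, then copies every non-zero int64 and non-empty string field from src onto the receiver; fields of other types (CheckedAt, Category, Entries, Icon) are left untouched. Roughly:

package main

import (
    "fmt"

    "github.com/miniflux/miniflux2/model"
)

func main() {
    original := &model.Feed{ID: 1, UserID: 1, Title: "Old title", SiteURL: "https://example.org/"}
    override := &model.Feed{ID: 99, Title: "New title"}

    original.Merge(override)

    fmt.Println(original.ID)      // 1 (IDs are never overridden)
    fmt.Println(original.Title)   // "New title"
    fmt.Println(original.SiteURL) // "https://example.org/" (empty fields are ignored)
}
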
+func GetThemes() map[string]string { + return map[string]string{ + "default": "Default", + "black": "Black", + } +} diff --git a/model/user.go b/model/user.go new file mode 100644 index 00000000..26bf6c98 --- /dev/null +++ b/model/user.go @@ -0,0 +1,96 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package model + +import ( + "errors" + "time" +) + +// User represents a user in the system. +type User struct { + ID int64 `json:"id"` + Username string `json:"username"` + Password string `json:"password,omitempty"` + IsAdmin bool `json:"is_admin"` + Theme string `json:"theme"` + Language string `json:"language"` + Timezone string `json:"timezone"` + LastLoginAt *time.Time `json:"last_login_at"` +} + +func (u User) ValidateUserCreation() error { + if err := u.ValidateUserLogin(); err != nil { + return err + } + + if err := u.ValidatePassword(); err != nil { + return err + } + + return nil +} + +func (u User) ValidateUserModification() error { + if u.Username == "" { + return errors.New("The username is mandatory") + } + + if err := u.ValidatePassword(); err != nil { + return err + } + + return nil +} + +func (u User) ValidateUserLogin() error { + if u.Username == "" { + return errors.New("The username is mandatory") + } + + if u.Password == "" { + return errors.New("The password is mandatory") + } + + return nil +} + +func (u User) ValidatePassword() error { + if u.Password != "" && len(u.Password) < 6 { + return errors.New("The password must have at least 6 characters") + } + + return nil +} + +// Merge update the current user with another user. +func (u *User) Merge(override *User) { + if u.Username != override.Username { + u.Username = override.Username + } + + if u.Password != override.Password { + u.Password = override.Password + } + + if u.IsAdmin != override.IsAdmin { + u.IsAdmin = override.IsAdmin + } + + if u.Theme != override.Theme { + u.Theme = override.Theme + } + + if u.Language != override.Language { + u.Language = override.Language + } + + if u.Timezone != override.Timezone { + u.Timezone = override.Timezone + } +} + +// Users represents a list of users. +type Users []*User diff --git a/reader/feed/atom/atom.go b/reader/feed/atom/atom.go new file mode 100644 index 00000000..db6172fa --- /dev/null +++ b/reader/feed/atom/atom.go @@ -0,0 +1,214 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
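User validation mirrors what the -create-admin flag does in main.go: build the struct, validate it, then hand it to the storage layer. Without the storage call, the flow is roughly:

package main

import (
    "fmt"

    "github.com/miniflux/miniflux2/model"
)

func main() {
    user := &model.User{Username: "admin", Password: "secret123", IsAdmin: true}

    // ValidateUserCreation requires a username and a password of at
    // least 6 characters.
    if err := user.ValidateUserCreation(); err != nil {
        fmt.Println(err)
        return
    }

    fmt.Println("ready to be passed to store.CreateUser:", user.Username)
}
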
+ +package atom + +import ( + "encoding/xml" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed/date" + "github.com/miniflux/miniflux2/reader/processor" + "github.com/miniflux/miniflux2/reader/sanitizer" + "log" + "strconv" + "strings" + "time" +) + +type AtomFeed struct { + XMLName xml.Name `xml:"http://www.w3.org/2005/Atom feed"` + ID string `xml:"id"` + Title string `xml:"title"` + Author Author `xml:"author"` + Links []Link `xml:"link"` + Entries []AtomEntry `xml:"entry"` +} + +type AtomEntry struct { + ID string `xml:"id"` + Title string `xml:"title"` + Updated string `xml:"updated"` + Links []Link `xml:"link"` + Summary string `xml:"summary"` + Content Content `xml:"content"` + MediaGroup MediaGroup `xml:"http://search.yahoo.com/mrss/ group"` + Author Author `xml:"author"` +} + +type Author struct { + Name string `xml:"name"` + Email string `xml:"email"` +} + +type Link struct { + Url string `xml:"href,attr"` + Type string `xml:"type,attr"` + Rel string `xml:"rel,attr"` + Length string `xml:"length,attr"` +} + +type Content struct { + Type string `xml:"type,attr"` + Data string `xml:",chardata"` + Xml string `xml:",innerxml"` +} + +type MediaGroup struct { + Description string `xml:"http://search.yahoo.com/mrss/ description"` +} + +func (a *AtomFeed) getSiteURL() string { + for _, link := range a.Links { + if strings.ToLower(link.Rel) == "alternate" { + return link.Url + } + + if link.Rel == "" && link.Type == "" { + return link.Url + } + } + + return "" +} + +func (a *AtomFeed) getFeedURL() string { + for _, link := range a.Links { + if strings.ToLower(link.Rel) == "self" { + return link.Url + } + } + + return "" +} + +func (a *AtomFeed) Transform() *model.Feed { + feed := new(model.Feed) + feed.FeedURL = a.getFeedURL() + feed.SiteURL = a.getSiteURL() + feed.Title = sanitizer.StripTags(a.Title) + + if feed.Title == "" { + feed.Title = feed.SiteURL + } + + for _, entry := range a.Entries { + item := entry.Transform() + if item.Author == "" { + item.Author = a.GetAuthor() + } + + feed.Entries = append(feed.Entries, item) + } + + return feed +} + +func (a *AtomFeed) GetAuthor() string { + return getAuthor(a.Author) +} + +func (e *AtomEntry) GetDate() time.Time { + if e.Updated != "" { + result, err := date.Parse(e.Updated) + if err != nil { + log.Println(err) + return time.Now() + } + + return result + } + + return time.Now() +} + +func (e *AtomEntry) GetURL() string { + for _, link := range e.Links { + if strings.ToLower(link.Rel) == "alternate" { + return link.Url + } + + if link.Rel == "" && link.Type == "" { + return link.Url + } + } + + return "" +} + +func (e *AtomEntry) GetAuthor() string { + return getAuthor(e.Author) +} + +func (e *AtomEntry) GetHash() string { + for _, value := range []string{e.ID, e.GetURL()} { + if value != "" { + return helper.Hash(value) + } + } + + return "" +} + +func (e *AtomEntry) GetContent() string { + if e.Content.Type == "html" || e.Content.Type == "text" { + return e.Content.Data + } + + if e.Content.Type == "xhtml" { + return e.Content.Xml + } + + if e.Summary != "" { + return e.Summary + } + + if e.MediaGroup.Description != "" { + return e.MediaGroup.Description + } + + return "" +} + +func (e *AtomEntry) GetEnclosures() model.EnclosureList { + enclosures := make(model.EnclosureList, 0) + + for _, link := range e.Links { + if strings.ToLower(link.Rel) == "enclosure" { + length, _ := strconv.Atoi(link.Length) + enclosures = append(enclosures, &model.Enclosure{URL: 
link.Url, MimeType: link.Type, Size: length}) + } + } + + return enclosures +} + +func (e *AtomEntry) Transform() *model.Entry { + entry := new(model.Entry) + entry.URL = e.GetURL() + entry.Date = e.GetDate() + entry.Author = sanitizer.StripTags(e.GetAuthor()) + entry.Hash = e.GetHash() + entry.Content = processor.ItemContentProcessor(entry.URL, e.GetContent()) + entry.Title = sanitizer.StripTags(strings.Trim(e.Title, " \n\t")) + entry.Enclosures = e.GetEnclosures() + + if entry.Title == "" { + entry.Title = entry.URL + } + + return entry +} + +func getAuthor(author Author) string { + if author.Name != "" { + return author.Name + } + + if author.Email != "" { + return author.Email + } + + return "" +} diff --git a/reader/feed/atom/parser.go b/reader/feed/atom/parser.go new file mode 100644 index 00000000..0b8f0d74 --- /dev/null +++ b/reader/feed/atom/parser.go @@ -0,0 +1,28 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package atom + +import ( + "encoding/xml" + "fmt" + "github.com/miniflux/miniflux2/model" + "io" + + "golang.org/x/net/html/charset" +) + +// Parse returns a normalized feed struct. +func Parse(data io.Reader) (*model.Feed, error) { + atomFeed := new(AtomFeed) + decoder := xml.NewDecoder(data) + decoder.CharsetReader = charset.NewReaderLabel + + err := decoder.Decode(atomFeed) + if err != nil { + return nil, fmt.Errorf("Unable to parse Atom feed: %v\n", err) + } + + return atomFeed.Transform(), nil +} diff --git a/reader/feed/atom/parser_test.go b/reader/feed/atom/parser_test.go new file mode 100644 index 00000000..39d94206 --- /dev/null +++ b/reader/feed/atom/parser_test.go @@ -0,0 +1,319 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package atom + +import ( + "bytes" + "testing" + "time" +) + +func TestParseAtomSample(t *testing.T) { + data := ` + + + Example Feed + + 2003-12-13T18:30:02Z + + John Doe + + urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6 + + + Atom-Powered Robots Run Amok + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "Example Feed" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } + + if feed.FeedURL != "" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } + + if feed.SiteURL != "http://example.org/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if !feed.Entries[0].Date.Equal(time.Date(2003, time.December, 13, 18, 30, 2, 0, time.UTC)) { + t.Errorf("Incorrect entry date, got: %v", feed.Entries[0].Date) + } + + if feed.Entries[0].Hash != "3841e5cf232f5111fc5841e9eba5f4b26d95e7d7124902e0f7272729d65601a6" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash) + } + + if feed.Entries[0].URL != "http://example.org/2003/12/13/atom03" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if feed.Entries[0].Title != "Atom-Powered Robots Run Amok" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } + + if feed.Entries[0].Content != "Some text." 
{ + t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content) + } + + if feed.Entries[0].Author != "John Doe" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseFeedWithoutTitle(t *testing.T) { + data := ` + + + + 2003-12-13T18:30:02Z + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "https://example.org/" { + t.Errorf("Incorrect feed title, got: %s", feed.Title) + } +} + +func TestParseEntryWithoutTitle(t *testing.T) { + data := ` + + + Example Feed + + 2003-12-13T18:30:02Z + + John Doe + + urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6 + + + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Title != "http://example.org/2003/12/13/atom03" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} + +func TestParseFeedURL(t *testing.T) { + data := ` + + Example Feed + + + 2003-12-13T18:30:02Z + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.SiteURL != "https://example.org/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } + + if feed.FeedURL != "https://example.org/feed" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } +} + +func TestParseEntryTitleWithWhitespaces(t *testing.T) { + data := ` + + Example Feed + + + + + Some Title + + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Title != "Some Title" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} + +func TestParseEntryWithAuthorName(t *testing.T) { + data := ` + + Example Feed + + + + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. + + Me + me@localhost + + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "Me" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseEntryWithoutAuthorName(t *testing.T) { + data := ` + + Example Feed + + + + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. + + + me@localhost + + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "me@localhost" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseEntryWithEnclosures(t *testing.T) { + data := ` + + http://www.example.org/myfeed + My Podcast Feed + 2005-07-15T12:00:00Z + + John Doe + + + + + http://www.example.org/entries/1 + Atom 1.0 + 2005-07-15T12:00:00Z + + An overview of Atom 1.0 + + + +
+        <link rel="enclosure" type="audio/mpeg" length="1234" href="http://www.example.org/myaudiofile.mp3" />
+        <link rel="enclosure" type="application/x-bittorrent" length="4567" href="http://www.example.org/myaudiofile.torrent" />
+        <content type="xhtml">
+          <div xmlns="http://www.w3.org/1999/xhtml">
+            <h1>Show Notes</h1>
+            <ul>
+              <li>00:01:00 -- Introduction</li>
+              <li>00:15:00 -- Talking about Atom 1.0</li>
+              <li>00:30:00 -- Wrapping up</li>
+            </ul>
+          </div>
+        </content>
+      </entry>
+    </feed>
` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].URL != "http://www.example.org/entries/1" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if len(feed.Entries[0].Enclosures) != 2 { + t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures)) + } + + if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" { + t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL) + } + + if feed.Entries[0].Enclosures[0].MimeType != "audio/mpeg" { + t.Errorf("Incorrect enclosure type, got: %s", feed.Entries[0].Enclosures[0].MimeType) + } + + if feed.Entries[0].Enclosures[0].Size != 1234 { + t.Errorf("Incorrect enclosure length, got: %d", feed.Entries[0].Enclosures[0].Size) + } + + if feed.Entries[0].Enclosures[1].URL != "http://www.example.org/myaudiofile.torrent" { + t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[1].URL) + } + + if feed.Entries[0].Enclosures[1].MimeType != "application/x-bittorrent" { + t.Errorf("Incorrect enclosure type, got: %s", feed.Entries[0].Enclosures[1].MimeType) + } + + if feed.Entries[0].Enclosures[1].Size != 4567 { + t.Errorf("Incorrect enclosure length, got: %d", feed.Entries[0].Enclosures[1].Size) + } +} diff --git a/reader/feed/date/parser.go b/reader/feed/date/parser.go new file mode 100644 index 00000000..e573ff87 --- /dev/null +++ b/reader/feed/date/parser.go @@ -0,0 +1,203 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package date + +import ( + "fmt" + "strings" + "time" +) + +// DateFormats taken from github.com/mjibson/goread +var dateFormats = []string{ + time.RFC822, // RSS + time.RFC822Z, // RSS + time.RFC3339, // Atom + time.UnixDate, + time.RubyDate, + time.RFC850, + time.RFC1123Z, + time.RFC1123, + time.ANSIC, + "Mon, January 2 2006 15:04:05 -0700", + "Mon, January 02, 2006, 15:04:05 MST", + "Mon, January 02, 2006 15:04:05 MST", + "Mon, Jan 2, 2006 15:04 MST", + "Mon, Jan 2 2006 15:04 MST", + "Mon, Jan 2, 2006 15:04:05 MST", + "Mon, Jan 2 2006 15:04:05 -700", + "Mon, Jan 2 2006 15:04:05 -0700", + "Mon Jan 2 15:04 2006", + "Mon Jan 2 15:04:05 2006 MST", + "Mon Jan 02, 2006 3:04 pm", + "Mon, Jan 02,2006 15:04:05 MST", + "Mon Jan 02 2006 15:04:05 -0700", + "Monday, January 2, 2006 15:04:05 MST", + "Monday, January 2, 2006 03:04 PM", + "Monday, January 2, 2006", + "Monday, January 02, 2006", + "Monday, 2 January 2006 15:04:05 MST", + "Monday, 2 January 2006 15:04:05 -0700", + "Monday, 2 Jan 2006 15:04:05 MST", + "Monday, 2 Jan 2006 15:04:05 -0700", + "Monday, 02 January 2006 15:04:05 MST", + "Monday, 02 January 2006 15:04:05 -0700", + "Monday, 02 January 2006 15:04:05", + "Mon, 2 January 2006 15:04 MST", + "Mon, 2 January 2006, 15:04 -0700", + "Mon, 2 January 2006, 15:04:05 MST", + "Mon, 2 January 2006 15:04:05 MST", + "Mon, 2 January 2006 15:04:05 -0700", + "Mon, 2 January 2006", + "Mon, 2 Jan 2006 3:04:05 PM -0700", + "Mon, 2 Jan 2006 15:4:5 MST", + "Mon, 2 Jan 2006 15:4:5 -0700 GMT", + "Mon, 2, Jan 2006 15:4", + "Mon, 2 Jan 2006 15:04 MST", + "Mon, 2 Jan 2006, 15:04 -0700", + "Mon, 2 Jan 2006 15:04 -0700", + "Mon, 2 Jan 2006 15:04:05 UT", + "Mon, 2 Jan 2006 15:04:05MST", + "Mon, 2 Jan 2006 15:04:05 MST", + "Mon 2 Jan 2006 15:04:05 MST", + "mon,2 Jan 2006 15:04:05 MST", + "Mon, 2 Jan 2006 15:04:05 -0700 MST", + "Mon, 2 Jan 2006 15:04:05-0700", + "Mon, 2 Jan 2006 15:04:05 -0700", + "Mon, 2 Jan 2006 15:04:05", + "Mon, 2 Jan 2006 15:04", + "Mon,2 Jan 2006", + "Mon, 2 Jan 2006", + "Mon, 2 Jan 15:04:05 MST", + "Mon, 2 Jan 06 15:04:05 MST", + "Mon, 2 Jan 06 15:04:05 -0700", + "Mon, 2006-01-02 15:04", + "Mon,02 January 2006 14:04:05 MST", + "Mon, 02 January 2006", + "Mon, 02 Jan 2006 3:04:05 PM MST", + "Mon, 02 Jan 2006 15 -0700", + "Mon,02 Jan 2006 15:04 MST", + "Mon, 02 Jan 2006 15:04 MST", + "Mon, 02 Jan 2006 15:04 -0700", + "Mon, 02 Jan 2006 15:04:05 Z", + "Mon, 02 Jan 2006 15:04:05 UT", + "Mon, 02 Jan 2006 15:04:05 MST-07:00", + "Mon, 02 Jan 2006 15:04:05 MST -0700", + "Mon, 02 Jan 2006, 15:04:05 MST", + "Mon, 02 Jan 2006 15:04:05MST", + "Mon, 02 Jan 2006 15:04:05 MST", + "Mon , 02 Jan 2006 15:04:05 MST", + "Mon, 02 Jan 2006 15:04:05 GMT-0700", + "Mon,02 Jan 2006 15:04:05 -0700", + "Mon, 02 Jan 2006 15:04:05 -0700", + "Mon, 02 Jan 2006 15:04:05 -07:00", + "Mon, 02 Jan 2006 15:04:05 --0700", + "Mon 02 Jan 2006 15:04:05 -0700", + "Mon, 02 Jan 2006 15:04:05 -07", + "Mon, 02 Jan 2006 15:04:05 00", + "Mon, 02 Jan 2006 15:04:05", + "Mon, 02 Jan 2006", + "Mon, 02 Jan 06 15:04:05 MST", + "January 2, 2006 3:04 PM", + "January 2, 2006, 3:04 p.m.", + "January 2, 2006 15:04:05 MST", + "January 2, 2006 15:04:05", + "January 2, 2006 03:04 PM", + "January 2, 2006", + "January 02, 2006 15:04:05 MST", + "January 02, 2006 15:04", + "January 02, 2006 03:04 PM", + "January 02, 2006", + "Jan 2, 2006 3:04:05 PM MST", + "Jan 2, 2006 3:04:05 PM", + "Jan 2, 2006 15:04:05 MST", + "Jan 2, 2006", + "Jan 02 2006 03:04:05PM", + "Jan 02, 2006", + "6/1/2 15:04", + "6-1-2 15:04", + "2 January 2006 15:04:05 MST", + "2 January 
2006 15:04:05 -0700", + "2 January 2006", + "2 Jan 2006 15:04:05 Z", + "2 Jan 2006 15:04:05 MST", + "2 Jan 2006 15:04:05 -0700", + "2 Jan 2006", + "2.1.2006 15:04:05", + "2/1/2006", + "2-1-2006", + "2006 January 02", + "2006-1-2T15:04:05Z", + "2006-1-2 15:04:05", + "2006-1-2", + "2006-1-02T15:04:05Z", + "2006-01-02T15:04Z", + "2006-01-02T15:04-07:00", + "2006-01-02T15:04:05Z", + "2006-01-02T15:04:05-07:00:00", + "2006-01-02T15:04:05:-0700", + "2006-01-02T15:04:05-0700", + "2006-01-02T15:04:05-07:00", + "2006-01-02T15:04:05 -0700", + "2006-01-02T15:04:05:00", + "2006-01-02T15:04:05", + "2006-01-02 at 15:04:05", + "2006-01-02 15:04:05Z", + "2006-01-02 15:04:05 MST", + "2006-01-02 15:04:05-0700", + "2006-01-02 15:04:05-07:00", + "2006-01-02 15:04:05 -0700", + "2006-01-02 15:04", + "2006-01-02 00:00:00.0 15:04:05.0 -0700", + "2006/01/02", + "2006-01-02", + "15:04 02.01.2006 -0700", + "1/2/2006 3:04 PM MST", + "1/2/2006 3:04:05 PM MST", + "1/2/2006 3:04:05 PM", + "1/2/2006 15:04:05 MST", + "1/2/2006", + "06/1/2 15:04", + "06-1-2 15:04", + "02 Monday, Jan 2006 15:04", + "02 Jan 2006 15:04 MST", + "02 Jan 2006 15:04:05 UT", + "02 Jan 2006 15:04:05 MST", + "02 Jan 2006 15:04:05 -0700", + "02 Jan 2006 15:04:05", + "02 Jan 2006", + "02/01/2006 15:04 MST", + "02-01-2006 15:04:05 MST", + "02.01.2006 15:04:05", + "02/01/2006 15:04:05", + "02.01.2006 15:04", + "02/01/2006 - 15:04", + "02.01.2006 -0700", + "02/01/2006", + "02-01-2006", + "01/02/2006 3:04 PM", + "01/02/2006 15:04:05 MST", + "01/02/2006 - 15:04", + "01/02/2006", + "01-02-2006", +} + +// Parse parses a given date string using a large +// list of commonly found feed date formats. +func Parse(ds string) (t time.Time, err error) { + d := strings.TrimSpace(ds) + if d == "" { + return t, fmt.Errorf("Date string is empty") + } + + for _, f := range dateFormats { + if t, err = time.Parse(f, d); err == nil { + return + } + } + + err = fmt.Errorf("Failed to parse date: %s", ds) + return +} diff --git a/reader/feed/handler.go b/reader/feed/handler.go new file mode 100644 index 00000000..27ff126b --- /dev/null +++ b/reader/feed/handler.go @@ -0,0 +1,152 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package feed + +import ( + "fmt" + "github.com/miniflux/miniflux2/errors" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/http" + "github.com/miniflux/miniflux2/reader/icon" + "github.com/miniflux/miniflux2/storage" + "log" + "time" +) + +var ( + errRequestFailed = "Unable to execute request: %v" + errServerFailure = "Unable to fetch feed (statusCode=%d)." + errDuplicate = "This feed already exists (%s)." + errNotFound = "Feed %d not found" +) + +// Handler contains all the logic to create and refresh feeds. +type Handler struct { + store *storage.Storage +} + +// CreateFeed fetch, parse and store a new feed. 
+func (h *Handler) CreateFeed(userID, categoryID int64, url string) (*model.Feed, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url)) + + client := http.NewHttpClient(url) + response, err := client.Get() + if err != nil { + return nil, errors.NewLocalizedError(errRequestFailed, err) + } + + if response.HasServerFailure() { + return nil, errors.NewLocalizedError(errServerFailure, response.StatusCode) + } + + if h.store.FeedURLExists(userID, response.EffectiveURL) { + return nil, errors.NewLocalizedError(errDuplicate, response.EffectiveURL) + } + + subscription, err := parseFeed(response.Body) + if err != nil { + return nil, err + } + + subscription.Category = &model.Category{ID: categoryID} + subscription.EtagHeader = response.ETag + subscription.LastModifiedHeader = response.LastModified + subscription.FeedURL = response.EffectiveURL + subscription.UserID = userID + + err = h.store.CreateFeed(subscription) + if err != nil { + return nil, err + } + + log.Println("[Handler:CreateFeed] Feed saved with ID:", subscription.ID) + + icon, err := icon.FindIcon(subscription.SiteURL) + if err != nil { + log.Println(err) + } else if icon == nil { + log.Printf("No icon found for feedID=%d\n", subscription.ID) + } else { + h.store.CreateFeedIcon(subscription, icon) + } + + return subscription, nil +} + +// RefreshFeed fetch and update a feed if necessary. +func (h *Handler) RefreshFeed(userID, feedID int64) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:RefreshFeed] feedID=%d", feedID)) + + originalFeed, err := h.store.GetFeedById(userID, feedID) + if err != nil { + return err + } + + if originalFeed == nil { + return errors.NewLocalizedError(errNotFound, feedID) + } + + client := http.NewHttpClientWithCacheHeaders(originalFeed.FeedURL, originalFeed.EtagHeader, originalFeed.LastModifiedHeader) + response, err := client.Get() + if err != nil { + customErr := errors.NewLocalizedError(errRequestFailed, err) + originalFeed.ParsingErrorCount++ + originalFeed.ParsingErrorMsg = customErr.Error() + h.store.UpdateFeed(originalFeed) + return customErr + } + + originalFeed.CheckedAt = time.Now() + + if response.HasServerFailure() { + err := errors.NewLocalizedError(errServerFailure, response.StatusCode) + originalFeed.ParsingErrorCount++ + originalFeed.ParsingErrorMsg = err.Error() + h.store.UpdateFeed(originalFeed) + return err + } + + if response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) { + log.Printf("[Handler:RefreshFeed] Feed #%d has been modified\n", feedID) + + subscription, err := parseFeed(response.Body) + if err != nil { + originalFeed.ParsingErrorCount++ + originalFeed.ParsingErrorMsg = err.Error() + h.store.UpdateFeed(originalFeed) + return err + } + + originalFeed.EtagHeader = response.ETag + originalFeed.LastModifiedHeader = response.LastModified + + if err := h.store.UpdateEntries(originalFeed.UserID, originalFeed.ID, subscription.Entries); err != nil { + return err + } + + if !h.store.HasIcon(originalFeed.ID) { + log.Println("[Handler:RefreshFeed] Looking for feed icon") + icon, err := icon.FindIcon(originalFeed.SiteURL) + if err != nil { + log.Println("[Handler:RefreshFeed]", err) + } else { + h.store.CreateFeedIcon(originalFeed, icon) + } + } + } else { + log.Printf("[Handler:RefreshFeed] Feed #%d not modified\n", feedID) + } + + originalFeed.ParsingErrorCount = 0 + originalFeed.ParsingErrorMsg = "" + + return h.store.UpdateFeed(originalFeed) +} + +// NewFeedHandler returns a feed handler. 
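As a rough usage sketch of the handler above: the example package, wrapper function, IDs, and feed URL are assumptions; only NewFeedHandler, CreateFeed, and RefreshFeed come from this commit.

// Illustrative sketch of wiring the feed handler.
package example

import (
	"log"

	"github.com/miniflux/miniflux2/reader/feed"
	"github.com/miniflux/miniflux2/storage"
)

// AddAndRefresh creates a subscription for a user and refreshes it once.
func AddAndRefresh(store *storage.Storage, userID, categoryID int64) error {
	handler := feed.NewFeedHandler(store)

	// CreateFeed downloads, parses and stores the feed, then tries to find its icon.
	subscription, err := handler.CreateFeed(userID, categoryID, "https://example.org/feed.xml")
	if err != nil {
		return err
	}
	log.Println("subscribed to feed", subscription.ID)

	// RefreshFeed re-downloads the feed, honouring the stored ETag/Last-Modified headers.
	return handler.RefreshFeed(userID, subscription.ID)
}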
+func NewFeedHandler(store *storage.Storage) *Handler { + return &Handler{store: store} +} diff --git a/reader/feed/json/json.go b/reader/feed/json/json.go new file mode 100644 index 00000000..e7d53c4c --- /dev/null +++ b/reader/feed/json/json.go @@ -0,0 +1,170 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package json + +import ( + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed/date" + "github.com/miniflux/miniflux2/reader/processor" + "github.com/miniflux/miniflux2/reader/sanitizer" + "log" + "strings" + "time" +) + +type JsonFeed struct { + Version string `json:"version"` + Title string `json:"title"` + SiteURL string `json:"home_page_url"` + FeedURL string `json:"feed_url"` + Author JsonAuthor `json:"author"` + Items []JsonItem `json:"items"` +} + +type JsonAuthor struct { + Name string `json:"name"` + URL string `json:"url"` +} + +type JsonItem struct { + ID string `json:"id"` + URL string `json:"url"` + Title string `json:"title"` + Summary string `json:"summary"` + Text string `json:"content_text"` + Html string `json:"content_html"` + DatePublished string `json:"date_published"` + DateModified string `json:"date_modified"` + Author JsonAuthor `json:"author"` + Attachments []JsonAttachment `json:"attachments"` +} + +type JsonAttachment struct { + URL string `json:"url"` + MimeType string `json:"mime_type"` + Title string `json:"title"` + Size int `json:"size_in_bytes"` + Duration int `json:"duration_in_seconds"` +} + +func (j *JsonFeed) GetAuthor() string { + return getAuthor(j.Author) +} + +func (j *JsonFeed) Transform() *model.Feed { + feed := new(model.Feed) + feed.FeedURL = j.FeedURL + feed.SiteURL = j.SiteURL + feed.Title = sanitizer.StripTags(j.Title) + + if feed.Title == "" { + feed.Title = feed.SiteURL + } + + for _, item := range j.Items { + entry := item.Transform() + if entry.Author == "" { + entry.Author = j.GetAuthor() + } + + feed.Entries = append(feed.Entries, entry) + } + + return feed +} + +func (j *JsonItem) GetDate() time.Time { + for _, value := range []string{j.DatePublished, j.DateModified} { + if value != "" { + d, err := date.Parse(value) + if err != nil { + log.Println(err) + return time.Now() + } + + return d + } + } + + return time.Now() +} + +func (j *JsonItem) GetAuthor() string { + return getAuthor(j.Author) +} + +func (j *JsonItem) GetHash() string { + for _, value := range []string{j.ID, j.URL, j.Text + j.Html + j.Summary} { + if value != "" { + return helper.Hash(value) + } + } + + return "" +} + +func (j *JsonItem) GetTitle() string { + for _, value := range []string{j.Title, j.Summary, j.Text, j.Html} { + if value != "" { + return truncate(value) + } + } + + return j.URL +} + +func (j *JsonItem) GetContent() string { + for _, value := range []string{j.Html, j.Text, j.Summary} { + if value != "" { + return value + } + } + + return "" +} + +func (j *JsonItem) GetEnclosures() model.EnclosureList { + enclosures := make(model.EnclosureList, 0) + + for _, attachment := range j.Attachments { + enclosures = append(enclosures, &model.Enclosure{ + URL: attachment.URL, + MimeType: attachment.MimeType, + Size: attachment.Size, + }) + } + + return enclosures +} + +func (j *JsonItem) Transform() *model.Entry { + entry := new(model.Entry) + entry.URL = j.URL + entry.Date = j.GetDate() + entry.Author = sanitizer.StripTags(j.GetAuthor()) + entry.Hash = j.GetHash() 
+ entry.Content = processor.ItemContentProcessor(entry.URL, j.GetContent()) + entry.Title = sanitizer.StripTags(strings.Trim(j.GetTitle(), " \n\t")) + entry.Enclosures = j.GetEnclosures() + return entry +} + +func getAuthor(author JsonAuthor) string { + if author.Name != "" { + return author.Name + } + + return "" +} + +func truncate(str string) string { + max := 100 + if len(str) > max { + return str[:max] + "..." + } + + return str +} diff --git a/reader/feed/json/parser.go b/reader/feed/json/parser.go new file mode 100644 index 00000000..816d28e6 --- /dev/null +++ b/reader/feed/json/parser.go @@ -0,0 +1,23 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package json + +import ( + "encoding/json" + "fmt" + "github.com/miniflux/miniflux2/model" + "io" +) + +// Parse returns a normalized feed struct. +func Parse(data io.Reader) (*model.Feed, error) { + jsonFeed := new(JsonFeed) + decoder := json.NewDecoder(data) + if err := decoder.Decode(&jsonFeed); err != nil { + return nil, fmt.Errorf("Unable to parse JSON Feed: %v", err) + } + + return jsonFeed.Transform(), nil +} diff --git a/reader/feed/json/parser_test.go b/reader/feed/json/parser_test.go new file mode 100644 index 00000000..f2c97fc9 --- /dev/null +++ b/reader/feed/json/parser_test.go @@ -0,0 +1,345 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package json + +import ( + "bytes" + "strings" + "testing" + "time" +) + +func TestParseJsonFeed(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "id": "2", + "content_text": "This is a second item.", + "url": "https://example.org/second-item" + }, + { + "id": "1", + "content_html": "

<p>Hello, world!</p>

", + "url": "https://example.org/initial-post" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "My Example Feed" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } + + if feed.FeedURL != "https://example.org/feed.json" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } + + if feed.SiteURL != "https://example.org/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } + + if len(feed.Entries) != 2 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].Hash != "d4735e3a265e16eee03f59718b9b5d03019c07d8b6c51f90da3a666eec13ab35" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash) + } + + if feed.Entries[0].URL != "https://example.org/second-item" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if feed.Entries[0].Title != "This is a second item." { + t.Errorf(`Incorrect entry title, got: "%s"`, feed.Entries[0].Title) + } + + if feed.Entries[0].Content != "This is a second item." { + t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content) + } + + if feed.Entries[1].Hash != "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[1].Hash) + } + + if feed.Entries[1].URL != "https://example.org/initial-post" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[1].URL) + } + + if feed.Entries[1].Title != "Hello, world!" { + t.Errorf(`Incorrect entry title, got: "%s"`, feed.Entries[1].Title) + } + + if feed.Entries[1].Content != "

<p>Hello, world!</p>

" { + t.Errorf("Incorrect entry content, got: %s", feed.Entries[1].Content) + } +} + +func TestParsePodcast(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "user_comment": "This is a podcast feed. You can add this feed to your podcast client using the following URL: http://therecord.co/feed.json", + "title": "The Record", + "home_page_url": "http://therecord.co/", + "feed_url": "http://therecord.co/feed.json", + "items": [ + { + "id": "http://therecord.co/chris-parrish", + "title": "Special #1 - Chris Parrish", + "url": "http://therecord.co/chris-parrish", + "content_text": "Chris has worked at Adobe and as a founder of Rogue Sheep, which won an Apple Design Award for Postage. Chris’s new company is Aged & Distilled with Guy English — which shipped Napkin, a Mac app for visual collaboration. Chris is also the co-host of The Record. He lives on Bainbridge Island, a quick ferry ride from Seattle.", + "content_html": "Chris has worked at Adobe and as a founder of Rogue Sheep, which won an Apple Design Award for Postage. Chris’s new company is Aged & Distilled with Guy English — which shipped Napkin, a Mac app for visual collaboration. Chris is also the co-host of The Record. He lives on Bainbridge Island, a quick ferry ride from Seattle.", + "summary": "Brent interviews Chris Parrish, co-host of The Record and one-half of Aged & Distilled.", + "date_published": "2014-05-09T14:04:00-07:00", + "attachments": [ + { + "url": "http://therecord.co/downloads/The-Record-sp1e1-ChrisParrish.m4a", + "mime_type": "audio/x-m4a", + "size_in_bytes": 89970236, + "duration_in_seconds": 6629 + } + ] + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "The Record" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } + + if feed.FeedURL != "http://therecord.co/feed.json" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } + + if feed.SiteURL != "http://therecord.co/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].Hash != "6b678e57962a1b001e4e873756563cdc08bbd06ca561e764e0baa9a382485797" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash) + } + + if feed.Entries[0].URL != "http://therecord.co/chris-parrish" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if feed.Entries[0].Title != "Special #1 - Chris Parrish" { + t.Errorf(`Incorrect entry title, got: "%s"`, feed.Entries[0].Title) + } + + if feed.Entries[0].Content != `Chris has worked at Adobe and as a founder of Rogue Sheep, which won an Apple Design Award for Postage. Chris’s new company is Aged & Distilled with Guy English — which shipped Napkin, a Mac app for visual collaboration. Chris is also the co-host of The Record. 
He lives on Bainbridge Island, a quick ferry ride from Seattle.` { + t.Errorf(`Incorrect entry content, got: "%s"`, feed.Entries[0].Content) + } + + location, _ := time.LoadLocation("America/Vancouver") + if !feed.Entries[0].Date.Equal(time.Date(2014, time.May, 9, 14, 4, 0, 0, location)) { + t.Errorf("Incorrect entry date, got: %v", feed.Entries[0].Date) + } + + if len(feed.Entries[0].Enclosures) != 1 { + t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures)) + } + + if feed.Entries[0].Enclosures[0].URL != "http://therecord.co/downloads/The-Record-sp1e1-ChrisParrish.m4a" { + t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL) + } + + if feed.Entries[0].Enclosures[0].MimeType != "audio/x-m4a" { + t.Errorf("Incorrect enclosure type, got: %s", feed.Entries[0].Enclosures[0].MimeType) + } + + if feed.Entries[0].Enclosures[0].Size != 89970236 { + t.Errorf("Incorrect enclosure length, got: %d", feed.Entries[0].Enclosures[0].Size) + } +} + +func TestParseAuthor(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "user_comment": "This is a microblog feed. You can add this to your feed reader using the following URL: https://example.org/feed.json", + "title": "Brent Simmons’s Microblog", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "author": { + "name": "Brent Simmons", + "url": "http://example.org/", + "avatar": "https://example.org/avatar.png" + }, + "items": [ + { + "id": "2347259", + "url": "https://example.org/2347259", + "content_text": "Cats are neat. \n\nhttps://example.org/cats", + "date_published": "2016-02-09T14:22:00-07:00" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].Author != "Brent Simmons" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseFeedWithoutTitle(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "id": "2347259", + "url": "https://example.org/2347259", + "content_text": "Cats are neat. \n\nhttps://example.org/cats", + "date_published": "2016-02-09T14:22:00-07:00" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "https://example.org/" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } +} + +func TestParseFeedItemWithInvalidDate(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "id": "2347259", + "url": "https://example.org/2347259", + "content_text": "Cats are neat. 
\n\nhttps://example.org/cats", + "date_published": "Tomorrow" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if !feed.Entries[0].Date.Before(time.Now()) { + t.Errorf("Incorrect entry date, got: %v", feed.Entries[0].Date) + } +} + +func TestParseFeedItemWithoutID(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "content_text": "Some text." + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].Hash != "13b4c5aecd1b6d749afcee968fbf9c80f1ed1bbdbe1aaf25cb34ebd01144bbe9" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash) + } +} + +func TestParseFeedItemWithoutTitle(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "url": "https://example.org/item" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].Title != "https://example.org/item" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} + +func TestParseTruncateItemTitle(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "title": "` + strings.Repeat("a", 200) + `" + } + ] + }` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if len(feed.Entries[0].Title) != 103 { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} diff --git a/reader/feed/parser.go b/reader/feed/parser.go new file mode 100644 index 00000000..8df6b46d --- /dev/null +++ b/reader/feed/parser.go @@ -0,0 +1,82 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
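The reader/feed/parser.go file that follows sniffs the payload to pick the right parser; below is a minimal sketch of calling its exported DetectFeedFormat function, where the sample JSON body is illustrative.

package main

import (
	"bytes"
	"fmt"

	"github.com/miniflux/miniflux2/reader/feed"
)

func main() {
	payload := `{"version": "https://jsonfeed.org/version/1", "title": "Example"}`

	// DetectFeedFormat tees the reader, inspects the first XML start element
	// ("rss" or "feed"), and falls back to JSON when the body begins with "{".
	fmt.Println(feed.DetectFeedFormat(bytes.NewBufferString(payload))) // prints "json"
}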
+ +package feed + +import ( + "bytes" + "encoding/xml" + "errors" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed/atom" + "github.com/miniflux/miniflux2/reader/feed/json" + "github.com/miniflux/miniflux2/reader/feed/rss" + "io" + "strings" + "time" + + "golang.org/x/net/html/charset" +) + +const ( + FormatRss = "rss" + FormatAtom = "atom" + FormatJson = "json" + FormatUnknown = "unknown" +) + +func DetectFeedFormat(data io.Reader) string { + defer helper.ExecutionTime(time.Now(), "[Feed:DetectFeedFormat]") + + var buffer bytes.Buffer + tee := io.TeeReader(data, &buffer) + + decoder := xml.NewDecoder(tee) + decoder.CharsetReader = charset.NewReaderLabel + + for { + token, _ := decoder.Token() + if token == nil { + break + } + + if element, ok := token.(xml.StartElement); ok { + switch element.Name.Local { + case "rss": + return FormatRss + case "feed": + return FormatAtom + } + } + } + + if strings.HasPrefix(strings.TrimSpace(buffer.String()), "{") { + return FormatJson + } + + return FormatUnknown +} + +func parseFeed(data io.Reader) (*model.Feed, error) { + defer helper.ExecutionTime(time.Now(), "[Feed:ParseFeed]") + + var buffer bytes.Buffer + io.Copy(&buffer, data) + + reader := bytes.NewReader(buffer.Bytes()) + format := DetectFeedFormat(reader) + reader.Seek(0, io.SeekStart) + + switch format { + case FormatAtom: + return atom.Parse(reader) + case FormatRss: + return rss.Parse(reader) + case FormatJson: + return json.Parse(reader) + default: + return nil, errors.New("Unsupported feed format") + } +} diff --git a/reader/feed/parser_test.go b/reader/feed/parser_test.go new file mode 100644 index 00000000..0dd8dd68 --- /dev/null +++ b/reader/feed/parser_test.go @@ -0,0 +1,169 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package feed + +import ( + "bytes" + "testing" +) + +func TestDetectRSS(t *testing.T) { + data := `` + format := DetectFeedFormat(bytes.NewBufferString(data)) + + if format != FormatRss { + t.Errorf("Wrong format detected: %s instead of %s", format, FormatRss) + } +} + +func TestDetectAtom(t *testing.T) { + data := `` + format := DetectFeedFormat(bytes.NewBufferString(data)) + + if format != FormatAtom { + t.Errorf("Wrong format detected: %s instead of %s", format, FormatAtom) + } +} + +func TestDetectAtomWithISOCharset(t *testing.T) { + data := `` + format := DetectFeedFormat(bytes.NewBufferString(data)) + + if format != FormatAtom { + t.Errorf("Wrong format detected: %s instead of %s", format, FormatAtom) + } +} + +func TestDetectJSON(t *testing.T) { + data := ` + { + "version" : "https://jsonfeed.org/version/1", + "title" : "Example" + } + ` + format := DetectFeedFormat(bytes.NewBufferString(data)) + + if format != FormatJson { + t.Errorf("Wrong format detected: %s instead of %s", format, FormatJson) + } +} + +func TestDetectUnknown(t *testing.T) { + data := ` + + ` + format := DetectFeedFormat(bytes.NewBufferString(data)) + + if format != FormatUnknown { + t.Errorf("Wrong format detected: %s instead of %s", format, FormatUnknown) + } +} + +func TestParseAtom(t *testing.T) { + data := ` + + + Example Feed + + 2003-12-13T18:30:02Z + + John Doe + + urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6 + + + Atom-Powered Robots Run Amok + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2003-12-13T18:30:02Z + Some text. 
+ + + ` + + feed, err := parseFeed(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "Example Feed" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } +} + +func TestParseRss(t *testing.T) { + data := ` + + + Liftoff News + http://liftoff.msfc.nasa.gov/ + + Star City + http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp + How do Americans get ready to work with Russians aboard the International Space Station? They take a crash course in culture, language and protocol at Russia's <a href="http://howe.iki.rssi.ru/GCTC/gctc_e.htm">Star City</a>. + Tue, 03 Jun 2003 09:39:21 GMT + http://liftoff.msfc.nasa.gov/2003/06/03.html#item573 + + + ` + + feed, err := parseFeed(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "Liftoff News" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } +} + +func TestParseJson(t *testing.T) { + data := `{ + "version": "https://jsonfeed.org/version/1", + "title": "My Example Feed", + "home_page_url": "https://example.org/", + "feed_url": "https://example.org/feed.json", + "items": [ + { + "id": "2", + "content_text": "This is a second item.", + "url": "https://example.org/second-item" + }, + { + "id": "1", + "content_html": "

<p>Hello, world!</p>

", + "url": "https://example.org/initial-post" + } + ] + }` + + feed, err := parseFeed(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "My Example Feed" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } +} + +func TestParseUnknownFeed(t *testing.T) { + data := ` + + + + Title of document + + + some content + + + ` + + _, err := parseFeed(bytes.NewBufferString(data)) + if err == nil { + t.Error("ParseFeed must returns an error") + } +} diff --git a/reader/feed/rss/parser.go b/reader/feed/rss/parser.go new file mode 100644 index 00000000..52310e48 --- /dev/null +++ b/reader/feed/rss/parser.go @@ -0,0 +1,28 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package rss + +import ( + "encoding/xml" + "fmt" + "github.com/miniflux/miniflux2/model" + "io" + + "golang.org/x/net/html/charset" +) + +// Parse returns a normalized feed struct. +func Parse(data io.Reader) (*model.Feed, error) { + rssFeed := new(RssFeed) + decoder := xml.NewDecoder(data) + decoder.CharsetReader = charset.NewReaderLabel + + err := decoder.Decode(rssFeed) + if err != nil { + return nil, fmt.Errorf("Unable to parse RSS feed: %v", err) + } + + return rssFeed.Transform(), nil +} diff --git a/reader/feed/rss/parser_test.go b/reader/feed/rss/parser_test.go new file mode 100644 index 00000000..42dc965c --- /dev/null +++ b/reader/feed/rss/parser_test.go @@ -0,0 +1,466 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package rss + +import ( + "bytes" + "testing" + "time" +) + +func TestParseRss2Sample(t *testing.T) { + data := ` + + + + Liftoff News + http://liftoff.msfc.nasa.gov/ + Liftoff to Space Exploration. + en-us + Tue, 10 Jun 2003 04:00:00 GMT + Tue, 10 Jun 2003 09:41:01 GMT + http://blogs.law.harvard.edu/tech/rss + Weblog Editor 2.0 + editor@example.com + webmaster@example.com + + Star City + http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp + How do Americans get ready to work with Russians aboard the International Space Station? They take a crash course in culture, language and protocol at Russia's <a href="http://howe.iki.rssi.ru/GCTC/gctc_e.htm">Star City</a>. + Tue, 03 Jun 2003 09:39:21 GMT + http://liftoff.msfc.nasa.gov/2003/06/03.html#item573 + + + Sky watchers in Europe, Asia, and parts of Alaska and Canada will experience a <a href="http://science.nasa.gov/headlines/y2003/30may_solareclipse.htm">partial eclipse of the Sun</a> on Saturday, May 31st. + Fri, 30 May 2003 11:06:42 GMT + http://liftoff.msfc.nasa.gov/2003/05/30.html#item572 + + + The Engine That Does More + http://liftoff.msfc.nasa.gov/news/2003/news-VASIMR.asp + Before man travels to Mars, NASA hopes to design new engines that will let us fly through the Solar System more quickly. The proposed VASIMR engine would do that. + Tue, 27 May 2003 08:37:32 GMT + http://liftoff.msfc.nasa.gov/2003/05/27.html#item571 + + + Astronauts' Dirty Laundry + http://liftoff.msfc.nasa.gov/news/2003/news-laundry.asp + Compared to earlier spacecraft, the International Space Station has many luxuries, but laundry facilities are not one of them. Instead, astronauts have other options. 
+ Tue, 20 May 2003 08:56:02 GMT + http://liftoff.msfc.nasa.gov/2003/05/20.html#item570 + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "Liftoff News" { + t.Errorf("Incorrect title, got: %s", feed.Title) + } + + if feed.FeedURL != "" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } + + if feed.SiteURL != "http://liftoff.msfc.nasa.gov/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } + + if len(feed.Entries) != 4 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + expectedDate := time.Date(2003, time.June, 3, 9, 39, 21, 0, time.UTC) + if !feed.Entries[0].Date.Equal(expectedDate) { + t.Errorf("Incorrect entry date, got: %v, want: %v", feed.Entries[0].Date, expectedDate) + } + + if feed.Entries[0].Hash != "5b2b4ac2fe1786ddf0fd2da2f1b07f64e691264f41f2db3ea360f31bb6d9152b" { + t.Errorf("Incorrect entry hash, got: %s", feed.Entries[0].Hash) + } + + if feed.Entries[0].URL != "http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if feed.Entries[0].Title != "Star City" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } + + if feed.Entries[0].Content != `How do Americans get ready to work with Russians aboard the International Space Station? They take a crash course in culture, language and protocol at Russia's Star City.` { + t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content) + } +} + +func TestParseFeedWithoutTitle(t *testing.T) { + data := ` + + + https://example.org/ + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Title != "https://example.org/" { + t.Errorf("Incorrect feed title, got: %s", feed.Title) + } +} + +func TestParseEntryWithoutTitle(t *testing.T) { + data := ` + + + https://example.org/ + + https://example.org/item + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Title != "https://example.org/item" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} + +func TestParseFeedURLWithAtomLink(t *testing.T) { + data := ` + + + Example + https://example.org/ + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.FeedURL != "https://example.org/rss" { + t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL) + } + + if feed.SiteURL != "https://example.org/" { + t.Errorf("Incorrect site URL, got: %s", feed.SiteURL) + } +} + +func TestParseEntryWithAtomAuthor(t *testing.T) { + data := ` + + + Example + https://example.org/ + + + Test + https://example.org/item + + Foo Bar + Vice President + + FooBar Inc. 
+ + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "Foo Bar" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseEntryWithDublinCoreAuthor(t *testing.T) { + data := ` + + + Example + https://example.org/ + + Test + https://example.org/item + Me (me@example.com) + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "Me (me@example.com)" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseEntryWithItunesAuthor(t *testing.T) { + data := ` + + + Example + https://example.org/ + + Test + https://example.org/item + Someone + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "Someone" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseFeedWithItunesAuthor(t *testing.T) { + data := ` + + + Example + https://example.org/ + Someone + + Test + https://example.org/item + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Author != "Someone" { + t.Errorf("Incorrect entry author, got: %s", feed.Entries[0].Author) + } +} + +func TestParseEntryWithDublinCoreDate(t *testing.T) { + data := ` + + + Example + http://example.org/ + + Item 1 + http://example.org/item1 + Description. + UUID + 2002-09-29T23:40:06-05:00 + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + location, _ := time.LoadLocation("EST") + expectedDate := time.Date(2002, time.September, 29, 23, 40, 06, 0, location) + if !feed.Entries[0].Date.Equal(expectedDate) { + t.Errorf("Incorrect entry date, got: %v, want: %v", feed.Entries[0].Date, expectedDate) + } +} + +func TestParseEntryWithContentEncoded(t *testing.T) { + data := ` + + + Example + http://example.org/ + + Item 1 + http://example.org/item1 + Description. + UUID + Example.

]]>
+
+
+
` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Content != `

Example.

` { + t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].Content) + } +} + +func TestParseEntryWithFeedBurnerLink(t *testing.T) { + data := ` + + + Example + http://example.org/ + + Item 1 + http://example.org/item1 + http://example.org/original + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].URL != "http://example.org/original" { + t.Errorf("Incorrect entry content, got: %s", feed.Entries[0].URL) + } +} + +func TestParseEntryTitleWithWhitespaces(t *testing.T) { + data := ` + + + Example + http://example.org + + + Some Title + + http://www.example.org/entries/1 + Fri, 15 Jul 2005 00:00:00 -0500 + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if feed.Entries[0].Title != "Some Title" { + t.Errorf("Incorrect entry title, got: %s", feed.Entries[0].Title) + } +} + +func TestParseEntryWithEnclosures(t *testing.T) { + data := ` + + + My Podcast Feed + http://example.org + some.email@example.org + + Podcasting with RSS + http://www.example.org/entries/1 + An overview of RSS podcasting + Fri, 15 Jul 2005 00:00:00 -0500 + http://www.example.org/entries/1 + + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].URL != "http://www.example.org/entries/1" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if len(feed.Entries[0].Enclosures) != 1 { + t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures)) + } + + if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" { + t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL) + } + + if feed.Entries[0].Enclosures[0].MimeType != "audio/mpeg" { + t.Errorf("Incorrect enclosure type, got: %s", feed.Entries[0].Enclosures[0].MimeType) + } + + if feed.Entries[0].Enclosures[0].Size != 12345 { + t.Errorf("Incorrect enclosure length, got: %d", feed.Entries[0].Enclosures[0].Size) + } +} + +func TestParseEntryWithFeedBurnerEnclosures(t *testing.T) { + data := ` + + + My Example Feed + http://example.org + some.email@example.org + + Example Item + http://www.example.org/entries/1 + + http://example.org/67ca416c-f22a-4228-a681-68fc9998ec10/File.mp3 + + + ` + + feed, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(feed.Entries) != 1 { + t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries)) + } + + if feed.Entries[0].URL != "http://www.example.org/entries/1" { + t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL) + } + + if len(feed.Entries[0].Enclosures) != 1 { + t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures)) + } + + if feed.Entries[0].Enclosures[0].URL != "http://example.org/67ca416c-f22a-4228-a681-68fc9998ec10/File.mp3" { + t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL) + } + + if feed.Entries[0].Enclosures[0].MimeType != "audio/mpeg" { + t.Errorf("Incorrect enclosure type, got: %s", feed.Entries[0].Enclosures[0].MimeType) + } + + if feed.Entries[0].Enclosures[0].Size != 76192460 { + t.Errorf("Incorrect enclosure length, got: %d", feed.Entries[0].Enclosures[0].Size) + } +} diff --git a/reader/feed/rss/rss.go b/reader/feed/rss/rss.go new file mode 100644 index 00000000..d88ed6bd --- /dev/null +++ b/reader/feed/rss/rss.go @@ -0,0 
+1,207 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package rss + +import ( + "encoding/xml" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed/date" + "github.com/miniflux/miniflux2/reader/processor" + "github.com/miniflux/miniflux2/reader/sanitizer" + "log" + "path" + "strconv" + "strings" + "time" +) + +type RssLink struct { + XMLName xml.Name + Data string `xml:",chardata"` + Href string `xml:"href,attr"` +} + +type RssFeed struct { + XMLName xml.Name `xml:"rss"` + Version string `xml:"version,attr"` + Title string `xml:"channel>title"` + Links []RssLink `xml:"channel>link"` + Language string `xml:"channel>language"` + Description string `xml:"channel>description"` + PubDate string `xml:"channel>pubDate"` + ItunesAuthor string `xml:"http://www.itunes.com/dtds/podcast-1.0.dtd channel>author"` + Items []RssItem `xml:"channel>item"` +} + +type RssItem struct { + Guid string `xml:"guid"` + Title string `xml:"title"` + Link string `xml:"link"` + OriginalLink string `xml:"http://rssnamespace.org/feedburner/ext/1.0 origLink"` + Description string `xml:"description"` + Content string `xml:"http://purl.org/rss/1.0/modules/content/ encoded"` + PubDate string `xml:"pubDate"` + Date string `xml:"http://purl.org/dc/elements/1.1/ date"` + Authors []RssAuthor `xml:"author"` + Creator string `xml:"http://purl.org/dc/elements/1.1/ creator"` + Enclosures []RssEnclosure `xml:"enclosure"` + OrigEnclosureLink string `xml:"http://rssnamespace.org/feedburner/ext/1.0 origEnclosureLink"` +} + +type RssAuthor struct { + XMLName xml.Name + Data string `xml:",chardata"` + Name string `xml:"name"` +} + +type RssEnclosure struct { + Url string `xml:"url,attr"` + Type string `xml:"type,attr"` + Length string `xml:"length,attr"` +} + +func (r *RssFeed) GetSiteURL() string { + for _, elem := range r.Links { + if elem.XMLName.Space == "" { + return elem.Data + } + } + + return "" +} + +func (r *RssFeed) GetFeedURL() string { + for _, elem := range r.Links { + if elem.XMLName.Space == "http://www.w3.org/2005/Atom" { + return elem.Href + } + } + + return "" +} + +func (r *RssFeed) Transform() *model.Feed { + feed := new(model.Feed) + feed.SiteURL = r.GetSiteURL() + feed.FeedURL = r.GetFeedURL() + feed.Title = sanitizer.StripTags(r.Title) + + if feed.Title == "" { + feed.Title = feed.SiteURL + } + + for _, item := range r.Items { + entry := item.Transform() + + if entry.Author == "" && r.ItunesAuthor != "" { + entry.Author = r.ItunesAuthor + } + entry.Author = sanitizer.StripTags(entry.Author) + + feed.Entries = append(feed.Entries, entry) + } + + return feed +} +func (i *RssItem) GetDate() time.Time { + value := i.PubDate + if i.Date != "" { + value = i.Date + } + + if value != "" { + result, err := date.Parse(value) + if err != nil { + log.Println(err) + return time.Now() + } + + return result + } + + return time.Now() +} + +func (i *RssItem) GetAuthor() string { + for _, element := range i.Authors { + if element.Name != "" { + return element.Name + } + + if element.Data != "" { + return element.Data + } + } + + return i.Creator +} + +func (i *RssItem) GetHash() string { + for _, value := range []string{i.Guid, i.Link} { + if value != "" { + return helper.Hash(value) + } + } + + return "" +} + +func (i *RssItem) GetContent() string { + if i.Content != "" { + return i.Content + } + + return i.Description +} + +func (i 
*RssItem) GetURL() string { + if i.OriginalLink != "" { + return i.OriginalLink + } + + return i.Link +} + +func (i *RssItem) GetEnclosures() model.EnclosureList { + enclosures := make(model.EnclosureList, 0) + + for _, enclosure := range i.Enclosures { + length, _ := strconv.Atoi(enclosure.Length) + enclosureURL := enclosure.Url + + if i.OrigEnclosureLink != "" { + filename := path.Base(i.OrigEnclosureLink) + if strings.Contains(enclosureURL, filename) { + enclosureURL = i.OrigEnclosureLink + } + } + + enclosures = append(enclosures, &model.Enclosure{ + URL: enclosureURL, + MimeType: enclosure.Type, + Size: length, + }) + } + + return enclosures +} + +func (i *RssItem) Transform() *model.Entry { + entry := new(model.Entry) + entry.URL = i.GetURL() + entry.Date = i.GetDate() + entry.Author = i.GetAuthor() + entry.Hash = i.GetHash() + entry.Content = processor.ItemContentProcessor(entry.URL, i.GetContent()) + entry.Title = sanitizer.StripTags(strings.Trim(i.Title, " \n\t")) + entry.Enclosures = i.GetEnclosures() + + if entry.Title == "" { + entry.Title = entry.URL + } + + return entry +} diff --git a/reader/http/client.go b/reader/http/client.go new file mode 100644 index 00000000..745ff0db --- /dev/null +++ b/reader/http/client.go @@ -0,0 +1,95 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package http + +import ( + "crypto/tls" + "fmt" + "github.com/miniflux/miniflux2/helper" + "log" + "net/http" + "net/url" + "time" +) + +const HTTP_USER_AGENT = "Miniflux " + +type HttpClient struct { + url string + etagHeader string + lastModifiedHeader string + Insecure bool +} + +func (h *HttpClient) Get() (*ServerResponse, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[HttpClient:Get] url=%s", h.url)) + u, _ := url.Parse(h.url) + + req := &http.Request{ + URL: u, + Method: "GET", + Header: h.buildHeaders(), + } + + client := h.buildClient() + resp, err := client.Do(req) + if err != nil { + return nil, err + } + + response := &ServerResponse{ + Body: resp.Body, + StatusCode: resp.StatusCode, + EffectiveURL: resp.Request.URL.String(), + LastModified: resp.Header.Get("Last-Modified"), + ETag: resp.Header.Get("ETag"), + ContentType: resp.Header.Get("Content-Type"), + } + + log.Println("[HttpClient:Get]", + "OriginalURL:", h.url, + "StatusCode:", response.StatusCode, + "ETag:", response.ETag, + "LastModified:", response.LastModified, + "EffectiveURL:", response.EffectiveURL, + ) + + return response, err +} + +func (h *HttpClient) buildClient() http.Client { + if h.Insecure { + transport := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + + return http.Client{Transport: transport} + } + + return http.Client{} +} + +func (h *HttpClient) buildHeaders() http.Header { + headers := make(http.Header) + headers.Add("User-Agent", HTTP_USER_AGENT) + + if h.etagHeader != "" { + headers.Add("If-None-Match", h.etagHeader) + } + + if h.lastModifiedHeader != "" { + headers.Add("If-Modified-Since", h.lastModifiedHeader) + } + + return headers +} + +func NewHttpClient(url string) *HttpClient { + return &HttpClient{url: url, Insecure: false} +} + +func NewHttpClientWithCacheHeaders(url, etagHeader, lastModifiedHeader string) *HttpClient { + return &HttpClient{url: url, etagHeader: etagHeader, lastModifiedHeader: lastModifiedHeader, Insecure: false} +} diff --git a/reader/http/response.go b/reader/http/response.go new file mode 100644 index 
00000000..49e9f196 --- /dev/null +++ b/reader/http/response.go @@ -0,0 +1,32 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package http + +import "io" + +type ServerResponse struct { + Body io.Reader + StatusCode int + EffectiveURL string + LastModified string + ETag string + ContentType string +} + +func (s *ServerResponse) HasServerFailure() bool { + return s.StatusCode >= 400 +} + +func (s *ServerResponse) IsModified(etag, lastModified string) bool { + if s.StatusCode == 304 { + return false + } + + if s.ETag != "" && s.LastModified != "" && (s.ETag == etag || s.LastModified == lastModified) { + return false + } + + return true +} diff --git a/reader/icon/finder.go b/reader/icon/finder.go new file mode 100644 index 00000000..54d509f5 --- /dev/null +++ b/reader/icon/finder.go @@ -0,0 +1,109 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package icon + +import ( + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/http" + "github.com/miniflux/miniflux2/reader/url" + "io" + "io/ioutil" + "log" + + "github.com/PuerkitoBio/goquery" +) + +// FindIcon try to find the website's icon. +func FindIcon(websiteURL string) (*model.Icon, error) { + rootURL := url.GetRootURL(websiteURL) + client := http.NewHttpClient(rootURL) + response, err := client.Get() + if err != nil { + return nil, fmt.Errorf("unable to download website index page: %v", err) + } + + if response.HasServerFailure() { + return nil, fmt.Errorf("unable to download website index page: status=%d", response.StatusCode) + } + + iconURL, err := parseDocument(rootURL, response.Body) + if err != nil { + return nil, err + } + + log.Println("[FindIcon] Fetching icon =>", iconURL) + icon, err := downloadIcon(iconURL) + if err != nil { + return nil, err + } + + return icon, nil +} + +func parseDocument(websiteURL string, data io.Reader) (string, error) { + queries := []string{ + "link[rel='shortcut icon']", + "link[rel='Shortcut Icon']", + "link[rel='icon shortcut']", + "link[rel='icon']", + } + + doc, err := goquery.NewDocumentFromReader(data) + if err != nil { + return "", fmt.Errorf("unable to read document: %v", err) + } + + var iconURL string + for _, query := range queries { + doc.Find(query).Each(func(i int, s *goquery.Selection) { + if href, exists := s.Attr("href"); exists { + iconURL = href + } + }) + + if iconURL != "" { + break + } + } + + if iconURL == "" { + iconURL = url.GetRootURL(websiteURL) + "favicon.ico" + } else { + iconURL, _ = url.GetAbsoluteURL(websiteURL, iconURL) + } + + return iconURL, nil +} + +func downloadIcon(iconURL string) (*model.Icon, error) { + client := http.NewHttpClient(iconURL) + response, err := client.Get() + if err != nil { + return nil, fmt.Errorf("unable to download iconURL: %v", err) + } + + if response.HasServerFailure() { + return nil, fmt.Errorf("unable to download icon: status=%d", response.StatusCode) + } + + body, err := ioutil.ReadAll(response.Body) + if err != nil { + return nil, fmt.Errorf("unable to read downloaded icon: %v", err) + } + + if len(body) == 0 { + return nil, fmt.Errorf("downloaded icon is empty, iconURL=%s", iconURL) + } + + icon := &model.Icon{ + Hash: helper.HashFromBytes(body), + MimeType: response.ContentType, + Content: body, + } + + return icon, 
nil +} diff --git a/reader/opml/handler.go b/reader/opml/handler.go new file mode 100644 index 00000000..6150d919 --- /dev/null +++ b/reader/opml/handler.go @@ -0,0 +1,94 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package opml + +import ( + "errors" + "fmt" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/storage" + "io" + "log" +) + +type OpmlHandler struct { + store *storage.Storage +} + +func (o *OpmlHandler) Export(userID int64) (string, error) { + feeds, err := o.store.GetFeeds(userID) + if err != nil { + log.Println(err) + return "", errors.New("Unable to fetch feeds.") + } + + var subscriptions SubcriptionList + for _, feed := range feeds { + subscriptions = append(subscriptions, &Subcription{ + Title: feed.Title, + FeedURL: feed.FeedURL, + SiteURL: feed.SiteURL, + CategoryName: feed.Category.Title, + }) + } + + return Serialize(subscriptions), nil +} + +func (o *OpmlHandler) Import(userID int64, data io.Reader) (err error) { + subscriptions, err := Parse(data) + if err != nil { + return err + } + + for _, subscription := range subscriptions { + if !o.store.FeedURLExists(userID, subscription.FeedURL) { + var category *model.Category + + if subscription.CategoryName == "" { + category, err = o.store.GetFirstCategory(userID) + if err != nil { + log.Println(err) + return errors.New("Unable to find first category.") + } + } else { + category, err = o.store.GetCategoryByTitle(userID, subscription.CategoryName) + if err != nil { + log.Println(err) + return errors.New("Unable to search category by title.") + } + + if category == nil { + category = &model.Category{ + UserID: userID, + Title: subscription.CategoryName, + } + + err := o.store.CreateCategory(category) + if err != nil { + log.Println(err) + return fmt.Errorf(`Unable to create this category: "%s".`, subscription.CategoryName) + } + } + } + + feed := &model.Feed{ + UserID: userID, + Title: subscription.Title, + FeedURL: subscription.FeedURL, + SiteURL: subscription.SiteURL, + Category: category, + } + + o.store.CreateFeed(feed) + } + } + + return nil +} + +func NewOpmlHandler(store *storage.Storage) *OpmlHandler { + return &OpmlHandler{store: store} +} diff --git a/reader/opml/opml.go b/reader/opml/opml.go new file mode 100644 index 00000000..d5278a75 --- /dev/null +++ b/reader/opml/opml.go @@ -0,0 +1,82 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
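// A minimal usage sketch for the OpmlHandler defined in reader/opml/handler.go
// above; the *storage.Storage value, the user ID, and the backup reader are
// assumed to be supplied by the caller and are illustrative only.
//
//	func roundTripOPML(store *storage.Storage, userID int64, backup io.Reader) (string, error) {
//		handler := NewOpmlHandler(store)
//		if err := handler.Import(userID, backup); err != nil {
//			return "", err
//		}
//		return handler.Export(userID)
//	}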
+ +package opml + +import "encoding/xml" + +type Opml struct { + XMLName xml.Name `xml:"opml"` + Version string `xml:"version,attr"` + Outlines []Outline `xml:"body>outline"` +} + +type Outline struct { + Title string `xml:"title,attr,omitempty"` + Text string `xml:"text,attr"` + FeedURL string `xml:"xmlUrl,attr,omitempty"` + SiteURL string `xml:"htmlUrl,attr,omitempty"` + Outlines []Outline `xml:"outline,omitempty"` +} + +func (o *Outline) GetTitle() string { + if o.Title != "" { + return o.Title + } + + if o.Text != "" { + return o.Text + } + + if o.SiteURL != "" { + return o.SiteURL + } + + if o.FeedURL != "" { + return o.FeedURL + } + + return "" +} + +func (o *Outline) GetSiteURL() string { + if o.SiteURL != "" { + return o.SiteURL + } + + return o.FeedURL +} + +func (o *Outline) IsCategory() bool { + return o.Text != "" && o.SiteURL == "" && o.FeedURL == "" +} + +func (o *Outline) Append(subscriptions SubcriptionList, category string) SubcriptionList { + if o.FeedURL != "" { + subscriptions = append(subscriptions, &Subcription{ + Title: o.GetTitle(), + FeedURL: o.FeedURL, + SiteURL: o.GetSiteURL(), + CategoryName: category, + }) + } + + return subscriptions +} + +func (o *Opml) Transform() SubcriptionList { + var subscriptions SubcriptionList + + for _, outline := range o.Outlines { + if outline.IsCategory() { + for _, element := range outline.Outlines { + subscriptions = element.Append(subscriptions, outline.Text) + } + } else { + subscriptions = outline.Append(subscriptions, "") + } + } + + return subscriptions +} diff --git a/reader/opml/parser.go b/reader/opml/parser.go new file mode 100644 index 00000000..5d8babd4 --- /dev/null +++ b/reader/opml/parser.go @@ -0,0 +1,26 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package opml + +import ( + "encoding/xml" + "fmt" + "io" + + "golang.org/x/net/html/charset" +) + +func Parse(data io.Reader) (SubcriptionList, error) { + opml := new(Opml) + decoder := xml.NewDecoder(data) + decoder.CharsetReader = charset.NewReaderLabel + + err := decoder.Decode(opml) + if err != nil { + return nil, fmt.Errorf("Unable to parse OPML file: %v\n", err) + } + + return opml.Transform(), nil +} diff --git a/reader/opml/parser_test.go b/reader/opml/parser_test.go new file mode 100644 index 00000000..02543dfc --- /dev/null +++ b/reader/opml/parser_test.go @@ -0,0 +1,138 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
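// An illustrative call to Parse above. The OPML sample is hypothetical but
// consistent with the Outline struct tags (text, xmlUrl, htmlUrl) and the
// body>outline nesting expected by the Opml type; strings.NewReader stands in
// for any io.Reader.
//
//	sample := `<?xml version="1.0" encoding="UTF-8"?>
//	<opml version="2.0">
//	  <body>
//	    <outline text="My Category">
//	      <outline text="Feed 1" xmlUrl="http://example.org/feed1/" htmlUrl="http://example.org/1"/>
//	    </outline>
//	  </body>
//	</opml>`
//
//	subscriptions, err := Parse(strings.NewReader(sample))
//	// With err == nil, subscriptions[0] has Title "Feed 1" and CategoryName "My Category".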
+ +package opml + +import "testing" +import "bytes" + +func TestParseOpmlWithoutCategories(t *testing.T) { + data := ` + + + mySubscriptions.opml + + + + + + + + + + + + + + + + + + ` + + var expected SubcriptionList + expected = append(expected, &Subcription{Title: "CNET News.com", FeedURL: "http://news.com.com/2547-1_3-0-5.xml", SiteURL: "http://news.com.com/"}) + + subscriptions, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(subscriptions) != 13 { + t.Errorf("Wrong number of subscriptions: %d instead of %d", len(subscriptions), 13) + } + + if !subscriptions[0].Equals(expected[0]) { + t.Errorf(`Subscription are different: "%v" vs "%v"`, subscriptions[0], expected[0]) + } +} + +func TestParseOpmlWithCategories(t *testing.T) { + data := ` + + + mySubscriptions.opml + + + + + + + + + + + + ` + + var expected SubcriptionList + expected = append(expected, &Subcription{Title: "Feed 1", FeedURL: "http://example.org/feed1/", SiteURL: "http://example.org/1", CategoryName: "My Category 1"}) + expected = append(expected, &Subcription{Title: "Feed 2", FeedURL: "http://example.org/feed2/", SiteURL: "http://example.org/2", CategoryName: "My Category 1"}) + expected = append(expected, &Subcription{Title: "Feed 3", FeedURL: "http://example.org/feed3/", SiteURL: "http://example.org/3", CategoryName: "My Category 2"}) + + subscriptions, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(subscriptions) != 3 { + t.Errorf("Wrong number of subscriptions: %d instead of %d", len(subscriptions), 3) + } + + for i := 0; i < len(subscriptions); i++ { + if !subscriptions[i].Equals(expected[i]) { + t.Errorf(`Subscription are different: "%v" vs "%v"`, subscriptions[i], expected[i]) + } + } +} + +func TestParseOpmlWithEmptyTitleAndEmptySiteURL(t *testing.T) { + data := ` + + + mySubscriptions.opml + + + + + + + ` + + var expected SubcriptionList + expected = append(expected, &Subcription{Title: "http://example.org/1", FeedURL: "http://example.org/feed1/", SiteURL: "http://example.org/1", CategoryName: ""}) + expected = append(expected, &Subcription{Title: "http://example.org/feed2/", FeedURL: "http://example.org/feed2/", SiteURL: "http://example.org/feed2/", CategoryName: ""}) + + subscriptions, err := Parse(bytes.NewBufferString(data)) + if err != nil { + t.Error(err) + } + + if len(subscriptions) != 2 { + t.Errorf("Wrong number of subscriptions: %d instead of %d", len(subscriptions), 2) + } + + for i := 0; i < len(subscriptions); i++ { + if !subscriptions[i].Equals(expected[i]) { + t.Errorf(`Subscription are different: "%v" vs "%v"`, subscriptions[i], expected[i]) + } + } +} + +func TestParseInvalidXML(t *testing.T) { + data := ` + + + + + + + ` + + _, err := Parse(bytes.NewBufferString(data)) + if err == nil { + t.Error(err) + } +} diff --git a/reader/opml/serializer.go b/reader/opml/serializer.go new file mode 100644 index 00000000..20c7046a --- /dev/null +++ b/reader/opml/serializer.go @@ -0,0 +1,58 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package opml + +import ( + "bufio" + "bytes" + "encoding/xml" + "log" +) + +func Serialize(subscriptions SubcriptionList) string { + var b bytes.Buffer + writer := bufio.NewWriter(&b) + writer.WriteString(xml.Header) + + opml := new(Opml) + opml.Version = "2.0" + for categoryName, subs := range groupSubscriptionsByFeed(subscriptions) { + outline := Outline{Text: categoryName} + + for _, subscription := range subs { + outline.Outlines = append(outline.Outlines, Outline{ + Title: subscription.Title, + Text: subscription.Title, + FeedURL: subscription.FeedURL, + SiteURL: subscription.SiteURL, + }) + } + + opml.Outlines = append(opml.Outlines, outline) + } + + encoder := xml.NewEncoder(writer) + encoder.Indent(" ", " ") + if err := encoder.Encode(opml); err != nil { + log.Println(err) + return "" + } + + return b.String() +} + +func groupSubscriptionsByFeed(subscriptions SubcriptionList) map[string]SubcriptionList { + groups := make(map[string]SubcriptionList) + + for _, subscription := range subscriptions { + // if subs, ok := groups[subscription.CategoryName]; !ok { + // groups[subscription.CategoryName] = SubcriptionList{} + // } + + groups[subscription.CategoryName] = append(groups[subscription.CategoryName], subscription) + } + + return groups +} diff --git a/reader/opml/serializer_test.go b/reader/opml/serializer_test.go new file mode 100644 index 00000000..b1ef2a63 --- /dev/null +++ b/reader/opml/serializer_test.go @@ -0,0 +1,31 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package opml + +import "testing" +import "bytes" + +func TestSerialize(t *testing.T) { + var subscriptions SubcriptionList + subscriptions = append(subscriptions, &Subcription{Title: "Feed 1", FeedURL: "http://example.org/feed/1", SiteURL: "http://example.org/1", CategoryName: "Category 1"}) + subscriptions = append(subscriptions, &Subcription{Title: "Feed 2", FeedURL: "http://example.org/feed/2", SiteURL: "http://example.org/2", CategoryName: "Category 1"}) + subscriptions = append(subscriptions, &Subcription{Title: "Feed 3", FeedURL: "http://example.org/feed/3", SiteURL: "http://example.org/3", CategoryName: "Category 2"}) + + output := Serialize(subscriptions) + feeds, err := Parse(bytes.NewBufferString(output)) + if err != nil { + t.Error(err) + } + + if len(feeds) != 3 { + t.Errorf("Wrong number of subscriptions: %d instead of %d", len(feeds), 3) + } + + for i := 0; i < len(feeds); i++ { + if !feeds[i].Equals(subscriptions[i]) { + t.Errorf(`Subscription are different: "%v" vs "%v"`, subscriptions[i], feeds[i]) + } + } +} diff --git a/reader/opml/subscription.go b/reader/opml/subscription.go new file mode 100644 index 00000000..b968bb08 --- /dev/null +++ b/reader/opml/subscription.go @@ -0,0 +1,18 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
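// A short sketch of Serialize as defined in serializer.go above: subscriptions
// are grouped by CategoryName, each category becoming a top-level outline with
// one child outline per feed. Because the grouping is a map, the category
// order in the generated document is not deterministic. The values below are
// illustrative.
//
//	subscriptions := SubcriptionList{
//		{Title: "Feed 1", FeedURL: "http://example.org/feed/1", SiteURL: "http://example.org/1", CategoryName: "Category 1"},
//	}
//	document := Serialize(subscriptions)
//	// document starts with xml.Header followed by the <opml version="2.0"> element.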
+ +package opml + +type Subcription struct { + Title string + SiteURL string + FeedURL string + CategoryName string +} + +func (s Subcription) Equals(subscription *Subcription) bool { + return s.Title == subscription.Title && s.SiteURL == subscription.SiteURL && s.FeedURL == subscription.FeedURL && s.CategoryName == subscription.CategoryName +} + +type SubcriptionList []*Subcription diff --git a/reader/processor/processor.go b/reader/processor/processor.go new file mode 100644 index 00000000..a4f4de13 --- /dev/null +++ b/reader/processor/processor.go @@ -0,0 +1,15 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package processor + +import ( + "github.com/miniflux/miniflux2/reader/rewrite" + "github.com/miniflux/miniflux2/reader/sanitizer" +) + +func ItemContentProcessor(url, content string) string { + content = sanitizer.Sanitize(url, content) + return rewrite.Rewriter(url, content) +} diff --git a/reader/rewrite/rewriter.go b/reader/rewrite/rewriter.go new file mode 100644 index 00000000..f23c0af7 --- /dev/null +++ b/reader/rewrite/rewriter.go @@ -0,0 +1,47 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package rewrite + +import ( + "regexp" + "strings" + + "github.com/PuerkitoBio/goquery" +) + +var rewriteRules = []func(string, string) string{ + func(url, content string) string { + re := regexp.MustCompile(`youtube\.com/watch\?v=(.*)`) + matches := re.FindStringSubmatch(url) + + if len(matches) == 2 { + video := `` + return video + "

" + content + "

" + } + return content + }, + func(url, content string) string { + if strings.HasPrefix(url, "https://xkcd.com") { + doc, err := goquery.NewDocumentFromReader(strings.NewReader(content)) + if err != nil { + return content + } + + imgTag := doc.Find("img").First() + if titleAttr, found := imgTag.Attr("title"); found { + return content + `
` + titleAttr + "
" + } + } + return content + }, +} + +func Rewriter(url, content string) string { + for _, rewriteRule := range rewriteRules { + content = rewriteRule(url, content) + } + + return content +} diff --git a/reader/rewrite/rewriter_test.go b/reader/rewrite/rewriter_test.go new file mode 100644 index 00000000..a6664067 --- /dev/null +++ b/reader/rewrite/rewriter_test.go @@ -0,0 +1,34 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package rewrite + +import "testing" + +func TestRewriteWithNoMatchingRule(t *testing.T) { + output := Rewriter("https://example.org/article", `Some text.`) + expected := `Some text.` + + if expected != output { + t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) + } +} + +func TestRewriteWithYoutubeLink(t *testing.T) { + output := Rewriter("https://www.youtube.com/watch?v=1234", `Video Description`) + expected := `

Video Description

` + + if expected != output { + t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) + } +} + +func TestRewriteWithXkcdLink(t *testing.T) { + description := `Your problem is so terrible, I worry that, if I help you, I risk drawing the attention of whatever god of technology inflicted it on you.` + output := Rewriter("https://xkcd.com/1912/", description) + expected := description + `
Your problem is so terrible, I worry that, if I help you, I risk drawing the attention of whatever god of technology inflicted it on you.
` + if expected != output { + t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) + } +} diff --git a/reader/sanitizer/sanitizer.go b/reader/sanitizer/sanitizer.go new file mode 100644 index 00000000..6af034c8 --- /dev/null +++ b/reader/sanitizer/sanitizer.go @@ -0,0 +1,360 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package sanitizer + +import ( + "bytes" + "fmt" + "github.com/miniflux/miniflux2/reader/url" + "io" + "strings" + + "golang.org/x/net/html" +) + +// Sanitize returns safe HTML. +func Sanitize(baseURL, input string) string { + tokenizer := html.NewTokenizer(bytes.NewBufferString(input)) + var buffer bytes.Buffer + var tagStack []string + + for { + if tokenizer.Next() == html.ErrorToken { + err := tokenizer.Err() + if err == io.EOF { + return buffer.String() + } + + return "" + } + + token := tokenizer.Token() + switch token.Type { + case html.TextToken: + buffer.WriteString(token.Data) + case html.StartTagToken: + tagName := token.DataAtom.String() + + if !isPixelTracker(tagName, token.Attr) && isValidTag(tagName) { + attrNames, htmlAttributes := sanitizeAttributes(baseURL, tagName, token.Attr) + + if hasRequiredAttributes(tagName, attrNames) { + if len(attrNames) > 0 { + buffer.WriteString("<" + tagName + " " + htmlAttributes + ">") + } else { + buffer.WriteString("<" + tagName + ">") + } + + tagStack = append(tagStack, tagName) + } + } + case html.EndTagToken: + tagName := token.DataAtom.String() + if isValidTag(tagName) && inList(tagName, tagStack) { + buffer.WriteString(fmt.Sprintf("", tagName)) + } + case html.SelfClosingTagToken: + tagName := token.DataAtom.String() + if !isPixelTracker(tagName, token.Attr) && isValidTag(tagName) { + attrNames, htmlAttributes := sanitizeAttributes(baseURL, tagName, token.Attr) + + if hasRequiredAttributes(tagName, attrNames) { + if len(attrNames) > 0 { + buffer.WriteString("<" + tagName + " " + htmlAttributes + "/>") + } else { + buffer.WriteString("<" + tagName + "/>") + } + } + } + } + } +} + +func sanitizeAttributes(baseURL, tagName string, attributes []html.Attribute) (attrNames []string, html string) { + var htmlAttrs []string + var err error + + for _, attribute := range attributes { + value := attribute.Val + + if !isValidAttribute(tagName, attribute.Key) { + continue + } + + if isExternalResourceAttribute(attribute.Key) { + if tagName == "iframe" && !isValidIframeSource(attribute.Val) { + continue + } else { + value, err = url.GetAbsoluteURL(baseURL, value) + if err != nil { + continue + } + + if !hasValidScheme(value) || isBlacklistedResource(value) { + continue + } + } + } + + attrNames = append(attrNames, attribute.Key) + htmlAttrs = append(htmlAttrs, fmt.Sprintf(`%s="%s"`, attribute.Key, value)) + } + + extraAttrNames, extraHTMLAttributes := getExtraAttributes(tagName) + if len(extraAttrNames) > 0 { + attrNames = append(attrNames, extraAttrNames...) + htmlAttrs = append(htmlAttrs, extraHTMLAttributes...) 
+ } + + return attrNames, strings.Join(htmlAttrs, " ") +} + +func getExtraAttributes(tagName string) ([]string, []string) { + if tagName == "a" { + return []string{"rel", "target", "referrerpolicy"}, []string{`rel="noopener noreferrer"`, `target="_blank"`, `referrerpolicy="no-referrer"`} + } + + if tagName == "video" || tagName == "audio" { + return []string{"controls"}, []string{"controls"} + } + + return nil, nil +} + +func isValidTag(tagName string) bool { + for element := range getTagWhitelist() { + if tagName == element { + return true + } + } + + return false +} + +func isValidAttribute(tagName, attributeName string) bool { + for element, attributes := range getTagWhitelist() { + if tagName == element { + if inList(attributeName, attributes) { + return true + } + } + } + + return false +} + +func isExternalResourceAttribute(attribute string) bool { + switch attribute { + case "src", "href", "poster", "cite": + return true + default: + return false + } +} + +func isPixelTracker(tagName string, attributes []html.Attribute) bool { + if tagName == "img" { + hasHeight := false + hasWidth := false + + for _, attribute := range attributes { + if attribute.Key == "height" && attribute.Val == "1" { + hasHeight = true + } + + if attribute.Key == "width" && attribute.Val == "1" { + hasWidth = true + } + } + + return hasHeight && hasWidth + } + + return false +} + +func hasRequiredAttributes(tagName string, attributes []string) bool { + elements := make(map[string][]string) + elements["a"] = []string{"href"} + elements["iframe"] = []string{"src"} + elements["img"] = []string{"src"} + elements["source"] = []string{"src"} + + for element, attrs := range elements { + if tagName == element { + for _, attribute := range attributes { + for _, attr := range attrs { + if attr == attribute { + return true + } + } + } + + return false + } + } + + return true +} + +func hasValidScheme(src string) bool { + // See https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml + whitelist := []string{ + "apt://", + "bitcoin://", + "callto://", + "ed2k://", + "facetime://", + "feed://", + "ftp://", + "geo://", + "gopher://", + "git://", + "http://", + "https://", + "irc://", + "irc6://", + "ircs://", + "itms://", + "jabber://", + "magnet://", + "mailto://", + "maps://", + "news://", + "nfs://", + "nntp://", + "rtmp://", + "sip://", + "sips://", + "skype://", + "smb://", + "sms://", + "spotify://", + "ssh://", + "sftp://", + "steam://", + "svn://", + "tel://", + "webcal://", + "xmpp://", + } + + for _, prefix := range whitelist { + if strings.HasPrefix(src, prefix) { + return true + } + } + + return false +} + +func isBlacklistedResource(src string) bool { + blacklist := []string{ + "feedsportal.com", + "api.flattr.com", + "stats.wordpress.com", + "plus.google.com/share", + "twitter.com/share", + "feeds.feedburner.com", + } + + for _, element := range blacklist { + if strings.Contains(src, element) { + return true + } + } + + return false +} + +func isValidIframeSource(src string) bool { + whitelist := []string{ + "http://www.youtube.com", + "https://www.youtube.com", + "http://player.vimeo.com", + "https://player.vimeo.com", + "http://www.dailymotion.com", + "https://www.dailymotion.com", + "http://vk.com", + "https://vk.com", + } + + for _, prefix := range whitelist { + if strings.HasPrefix(src, prefix) { + return true + } + } + + return false +} + +func getTagWhitelist() map[string][]string { + whitelist := make(map[string][]string) + whitelist["img"] = []string{"alt", "title", "src"} + whitelist["audio"] = 
[]string{"src"} + whitelist["video"] = []string{"poster", "height", "width", "src"} + whitelist["source"] = []string{"src", "type"} + whitelist["dt"] = []string{} + whitelist["dd"] = []string{} + whitelist["dl"] = []string{} + whitelist["table"] = []string{} + whitelist["caption"] = []string{} + whitelist["thead"] = []string{} + whitelist["tfooter"] = []string{} + whitelist["tr"] = []string{} + whitelist["td"] = []string{"rowspan", "colspan"} + whitelist["th"] = []string{"rowspan", "colspan"} + whitelist["h1"] = []string{} + whitelist["h2"] = []string{} + whitelist["h3"] = []string{} + whitelist["h4"] = []string{} + whitelist["h5"] = []string{} + whitelist["h6"] = []string{} + whitelist["strong"] = []string{} + whitelist["em"] = []string{} + whitelist["code"] = []string{} + whitelist["pre"] = []string{} + whitelist["blockquote"] = []string{} + whitelist["q"] = []string{"cite"} + whitelist["p"] = []string{} + whitelist["ul"] = []string{} + whitelist["li"] = []string{} + whitelist["ol"] = []string{} + whitelist["br"] = []string{} + whitelist["del"] = []string{} + whitelist["a"] = []string{"href", "title"} + whitelist["figure"] = []string{} + whitelist["figcaption"] = []string{} + whitelist["cite"] = []string{} + whitelist["time"] = []string{"datetime"} + whitelist["abbr"] = []string{"title"} + whitelist["acronym"] = []string{"title"} + whitelist["wbr"] = []string{} + whitelist["dfn"] = []string{} + whitelist["sub"] = []string{} + whitelist["sup"] = []string{} + whitelist["var"] = []string{} + whitelist["samp"] = []string{} + whitelist["s"] = []string{} + whitelist["del"] = []string{} + whitelist["ins"] = []string{} + whitelist["kbd"] = []string{} + whitelist["rp"] = []string{} + whitelist["rt"] = []string{} + whitelist["rtc"] = []string{} + whitelist["ruby"] = []string{} + whitelist["iframe"] = []string{"width", "height", "frameborder", "src", "allowfullscreen"} + return whitelist +} + +func inList(needle string, haystack []string) bool { + for _, element := range haystack { + if element == needle { + return true + } + } + + return false +} diff --git a/reader/sanitizer/sanitizer_test.go b/reader/sanitizer/sanitizer_test.go new file mode 100644 index 00000000..73862d37 --- /dev/null +++ b/reader/sanitizer/sanitizer_test.go @@ -0,0 +1,144 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package sanitizer + +import "testing" + +func TestValidInput(t *testing.T) { + input := `

This is a text with an image: Test.

` + output := Sanitize("http://example.org/", input) + + if input != output { + t.Errorf(`Wrong output: "%s" != "%s"`, input, output) + } +} + +func TestSelfClosingTags(t *testing.T) { + input := `

This
is a text
with an image: Test.

` + output := Sanitize("http://example.org/", input) + + if input != output { + t.Errorf(`Wrong output: "%s" != "%s"`, input, output) + } +} + +func TestTable(t *testing.T) { + input := `
AB
CDE
` + output := Sanitize("http://example.org/", input) + + if input != output { + t.Errorf(`Wrong output: "%s" != "%s"`, input, output) + } +} + +func TestRelativeURL(t *testing.T) { + input := `This link is relative and this image: ` + expected := `This link is relative and this image: ` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestProtocolRelativeURL(t *testing.T) { + input := `This link is relative.` + expected := `This link is relative.` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestInvalidTag(t *testing.T) { + input := `

My invalid tag.

` + expected := `

My invalid tag.

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestVideoTag(t *testing.T) { + input := `

My valid .

` + expected := `

My valid .

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestAudioAndSourceTag(t *testing.T) { + input := `

My music .

` + expected := `

My music .

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestUnknownTag(t *testing.T) { + input := `

My invalid tag.

` + expected := `

My invalid tag.

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestInvalidNestedTag(t *testing.T) { + input := `

My invalid tag with some valid tag.

` + expected := `

My invalid tag with some valid tag.

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestInvalidIFrame(t *testing.T) { + input := `` + expected := `` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestInvalidURLScheme(t *testing.T) { + input := `

This link is not valid

` + expected := `

This link is not valid

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestBlacklistedLink(t *testing.T) { + input := `

This image is not valid

` + expected := `

This image is not valid

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} + +func TestPixelTracker(t *testing.T) { + input := `

and

` + expected := `

and

` + output := Sanitize("http://example.org/", input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} diff --git a/reader/sanitizer/strip_tags.go b/reader/sanitizer/strip_tags.go new file mode 100644 index 00000000..f3cecd11 --- /dev/null +++ b/reader/sanitizer/strip_tags.go @@ -0,0 +1,35 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package sanitizer + +import ( + "bytes" + "io" + + "golang.org/x/net/html" +) + +// StripTags removes all HTML/XML tags from the input string. +func StripTags(input string) string { + tokenizer := html.NewTokenizer(bytes.NewBufferString(input)) + var buffer bytes.Buffer + + for { + if tokenizer.Next() == html.ErrorToken { + err := tokenizer.Err() + if err == io.EOF { + return buffer.String() + } + + return "" + } + + token := tokenizer.Token() + switch token.Type { + case html.TextToken: + buffer.WriteString(token.Data) + } + } +} diff --git a/reader/sanitizer/strip_tags_test.go b/reader/sanitizer/strip_tags_test.go new file mode 100644 index 00000000..763ccc9e --- /dev/null +++ b/reader/sanitizer/strip_tags_test.go @@ -0,0 +1,17 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package sanitizer + +import "testing" + +func TestStripTags(t *testing.T) { + input := `This link is relative and this image: ` + expected := `This link is relative and this image: ` + output := StripTags(input) + + if expected != output { + t.Errorf(`Wrong output: "%s" != "%s"`, expected, output) + } +} diff --git a/reader/subscription/finder.go b/reader/subscription/finder.go new file mode 100644 index 00000000..7314644d --- /dev/null +++ b/reader/subscription/finder.go @@ -0,0 +1,96 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package subscription + +import ( + "bytes" + "fmt" + "github.com/miniflux/miniflux2/errors" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/reader/feed" + "github.com/miniflux/miniflux2/reader/http" + "github.com/miniflux/miniflux2/reader/url" + "io" + "log" + "time" + + "github.com/PuerkitoBio/goquery" +) + +var ( + errConnectionFailure = "Unable to open this link: %v" + errUnreadableDoc = "Unable to analyze this page: %v" +) + +// FindSubscriptions downloads and try to find one or more subscriptions from an URL. 
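// A hypothetical use of the sanitizer package above: Sanitize keeps only
// whitelisted tags and attributes, rewrites relative src/href values against
// the base URL, appends rel/target/referrerpolicy to links, and drops 1x1
// pixel trackers; StripTags reduces markup to its text content. The input
// HTML below is invented for illustration.
//
//	safe := sanitizer.Sanitize("http://example.org/", `<p>Hello <script>alert(1)</script><a href="/page">link</a></p>`)
//	// The <script> tags are removed (their text is kept as plain text), and the
//	// link becomes <a href="http://example.org/page" rel="noopener noreferrer" target="_blank" referrerpolicy="no-referrer">.
//	text := sanitizer.StripTags(safe)
//	// text contains only the textual content of the sanitized fragment.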
+func FindSubscriptions(websiteURL string) (Subscriptions, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[FindSubscriptions] url=%s", websiteURL)) + + client := http.NewHttpClient(websiteURL) + response, err := client.Get() + if err != nil { + return nil, errors.NewLocalizedError(errConnectionFailure, err) + } + + var buffer bytes.Buffer + io.Copy(&buffer, response.Body) + reader := bytes.NewReader(buffer.Bytes()) + + if format := feed.DetectFeedFormat(reader); format != feed.FormatUnknown { + var subscriptions Subscriptions + subscriptions = append(subscriptions, &Subscription{ + Title: response.EffectiveURL, + URL: response.EffectiveURL, + Type: format, + }) + + return subscriptions, nil + } + + reader.Seek(0, io.SeekStart) + return parseDocument(response.EffectiveURL, bytes.NewReader(buffer.Bytes())) +} + +func parseDocument(websiteURL string, data io.Reader) (Subscriptions, error) { + var subscriptions Subscriptions + queries := map[string]string{ + "link[type='application/rss+xml']": "rss", + "link[type='application/atom+xml']": "atom", + "link[type='application/json']": "json", + } + + doc, err := goquery.NewDocumentFromReader(data) + if err != nil { + return nil, errors.NewLocalizedError(errUnreadableDoc, err) + } + + for query, kind := range queries { + doc.Find(query).Each(func(i int, s *goquery.Selection) { + subscription := new(Subscription) + subscription.Type = kind + + if title, exists := s.Attr("title"); exists { + subscription.Title = title + } else { + subscription.Title = "Feed" + } + + if feedURL, exists := s.Attr("href"); exists { + subscription.URL, _ = url.GetAbsoluteURL(websiteURL, feedURL) + } + + if subscription.Title == "" { + subscription.Title = subscription.URL + } + + if subscription.URL != "" { + log.Println("[FindSubscriptions]", subscription) + subscriptions = append(subscriptions, subscription) + } + }) + } + + return subscriptions, nil +} diff --git a/reader/subscription/subscription.go b/reader/subscription/subscription.go new file mode 100644 index 00000000..f619f738 --- /dev/null +++ b/reader/subscription/subscription.go @@ -0,0 +1,21 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package subscription + +import "fmt" + +// Subscription represents a feed subscription. +type Subscription struct { + Title string `json:"title"` + URL string `json:"url"` + Type string `json:"type"` +} + +func (s Subscription) String() string { + return fmt.Sprintf(`Title="%s", URL="%s", Type="%s"`, s.Title, s.URL, s.Type) +} + +// Subscriptions represents a list of subscription. +type Subscriptions []*Subscription diff --git a/reader/url/url.go b/reader/url/url.go new file mode 100644 index 00000000..eea658bf --- /dev/null +++ b/reader/url/url.go @@ -0,0 +1,61 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package url + +import "net/url" +import "fmt" +import "strings" + +// GetAbsoluteURL converts the input URL as absolute URL if necessary. 
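// An illustrative call to FindSubscriptions above (the URL is a placeholder):
// the function first checks whether the downloaded document is itself a feed,
// and only then falls back to scanning the page for rss/atom/json link
// elements.
//
//	subscriptions, err := subscription.FindSubscriptions("https://example.org/")
//	if err == nil {
//		for _, s := range subscriptions {
//			fmt.Println(s) // prints Title="...", URL="...", Type="rss", "atom" or "json"
//		}
//	}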
+func GetAbsoluteURL(baseURL, input string) (string, error) { + if strings.HasPrefix(input, "//") { + input = "https://" + input[2:] + } + + u, err := url.Parse(input) + if err != nil { + return "", fmt.Errorf("unable to parse input URL: %v", err) + } + + if u.IsAbs() { + return u.String(), nil + } + + base, err := url.Parse(baseURL) + if err != nil { + return "", fmt.Errorf("unable to parse base URL: %v", err) + } + + return base.ResolveReference(u).String(), nil +} + +// GetRootURL returns absolute URL without the path. +func GetRootURL(websiteURL string) string { + if strings.HasPrefix(websiteURL, "//") { + websiteURL = "https://" + websiteURL[2:] + } + + absoluteURL, err := GetAbsoluteURL(websiteURL, "") + if err != nil { + return websiteURL + } + + u, err := url.Parse(absoluteURL) + if err != nil { + return absoluteURL + } + + return u.Scheme + "://" + u.Host + "/" +} + +// IsHTTPS returns true if the URL is using HTTPS. +func IsHTTPS(websiteURL string) bool { + parsedURL, err := url.Parse(websiteURL) + if err != nil { + return false + } + + return strings.ToLower(parsedURL.Scheme) == "https" +} diff --git a/reader/url/url_test.go b/reader/url/url_test.go new file mode 100644 index 00000000..0a970868 --- /dev/null +++ b/reader/url/url_test.go @@ -0,0 +1,107 @@ +package url + +import "testing" + +func TestGetAbsoluteURLWithAbsolutePath(t *testing.T) { + expected := `https://example.org/path/file.ext` + input := `/path/file.ext` + output, err := GetAbsoluteURL("https://example.org/folder/", input) + + if err != nil { + t.Error(err) + } + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestGetAbsoluteURLWithRelativePath(t *testing.T) { + expected := `https://example.org/folder/path/file.ext` + input := `path/file.ext` + output, err := GetAbsoluteURL("https://example.org/folder/", input) + + if err != nil { + t.Error(err) + } + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestGetAbsoluteURLWithRelativePaths(t *testing.T) { + expected := `https://example.org/path/file.ext` + input := `path/file.ext` + output, err := GetAbsoluteURL("https://example.org/folder", input) + + if err != nil { + t.Error(err) + } + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestWhenInputIsAlreadyAbsolute(t *testing.T) { + expected := `https://example.org/path/file.ext` + input := `https://example.org/path/file.ext` + output, err := GetAbsoluteURL("https://example.org/folder/", input) + + if err != nil { + t.Error(err) + } + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestGetAbsoluteURLWithProtocolRelative(t *testing.T) { + expected := `https://static.example.org/path/file.ext` + input := `//static.example.org/path/file.ext` + output, err := GetAbsoluteURL("https://www.example.org/", input) + + if err != nil { + t.Error(err) + } + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestGetRootURL(t *testing.T) { + expected := `https://example.org/` + input := `https://example.org/path/file.ext` + output := GetRootURL(input) + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestGetRootURLWithProtocolRelativePath(t *testing.T) { + expected := `https://static.example.org/` + input := 
`//static.example.org/path/file.ext` + output := GetRootURL(input) + + if expected != output { + t.Errorf(`Unexpected output, got "%s" instead of "%s"`, output, expected) + } +} + +func TestIsHTTPS(t *testing.T) { + if !IsHTTPS("https://example.org/") { + t.Error("Unable to recognize HTTPS URL") + } + + if IsHTTPS("http://example.org/") { + t.Error("Unable to recognize HTTP URL") + } + + if IsHTTPS("") { + t.Error("Unable to recognize malformed URL") + } +} diff --git a/scheduler/scheduler.go b/scheduler/scheduler.go new file mode 100644 index 00000000..ab87e992 --- /dev/null +++ b/scheduler/scheduler.go @@ -0,0 +1,24 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package scheduler + +import ( + "github.com/miniflux/miniflux2/storage" + "log" + "time" +) + +// NewScheduler starts a new scheduler to push jobs to a pool of workers. +func NewScheduler(store *storage.Storage, workerPool *WorkerPool, frequency, batchSize int) { + c := time.Tick(time.Duration(frequency) * time.Minute) + for now := range c { + jobs := store.GetJobs(batchSize) + log.Printf("[Scheduler:%v] => Pushing %d jobs\n", now, len(jobs)) + + for _, job := range jobs { + workerPool.Push(job) + } + } +} diff --git a/scheduler/worker.go b/scheduler/worker.go new file mode 100644 index 00000000..39382a38 --- /dev/null +++ b/scheduler/worker.go @@ -0,0 +1,35 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package scheduler + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed" + "log" + "time" +) + +// A Worker refresh a feed in the background. +type Worker struct { + id int + feedHandler *feed.Handler +} + +// Run wait for a job and refresh the given feed. +func (w *Worker) Run(c chan model.Job) { + log.Printf("[Worker] #%d started\n", w.id) + + for { + job := <-c + log.Printf("[Worker #%d] got userID=%d, feedID=%d\n", w.id, job.UserID, job.FeedID) + + err := w.feedHandler.RefreshFeed(job.UserID, job.FeedID) + if err != nil { + log.Println("Worker:", err) + } + + time.Sleep(time.Millisecond * 1000) + } +} diff --git a/scheduler/worker_pool.go b/scheduler/worker_pool.go new file mode 100644 index 00000000..b753f893 --- /dev/null +++ b/scheduler/worker_pool.go @@ -0,0 +1,34 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package scheduler + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed" +) + +// WorkerPool handle a pool of workers. +type WorkerPool struct { + queue chan model.Job +} + +// Push send a job on the queue. +func (w *WorkerPool) Push(job model.Job) { + w.queue <- job +} + +// NewWorkerPool creates a pool of background workers. +func NewWorkerPool(feedHandler *feed.Handler, nbWorkers int) *WorkerPool { + workerPool := &WorkerPool{ + queue: make(chan model.Job), + } + + for i := 0; i < nbWorkers; i++ { + worker := &Worker{id: i, feedHandler: feedHandler} + go worker.Run(workerPool.queue) + } + + return workerPool +} diff --git a/server/api/controller/category.go b/server/api/controller/category.go new file mode 100644 index 00000000..01aa14b1 --- /dev/null +++ b/server/api/controller/category.go @@ -0,0 +1,97 @@ +// Copyright 2017 Frédéric Guillot. 
All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "errors" + "github.com/miniflux/miniflux2/server/api/payload" + "github.com/miniflux/miniflux2/server/core" +) + +// CreateCategory is the API handler to create a new category. +func (c *Controller) CreateCategory(ctx *core.Context, request *core.Request, response *core.Response) { + category, err := payload.DecodeCategoryPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + category.UserID = ctx.GetUserID() + if err := category.ValidateCategoryCreation(); err != nil { + response.Json().ServerError(err) + return + } + + err = c.store.CreateCategory(category) + if err != nil { + response.Json().ServerError(errors.New("Unable to create this category")) + return + } + + response.Json().Created(category) +} + +// UpdateCategory is the API handler to update a category. +func (c *Controller) UpdateCategory(ctx *core.Context, request *core.Request, response *core.Response) { + categoryID, err := request.GetIntegerParam("categoryID") + if err != nil { + response.Json().BadRequest(err) + return + } + + category, err := payload.DecodeCategoryPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + category.UserID = ctx.GetUserID() + category.ID = categoryID + if err := category.ValidateCategoryModification(); err != nil { + response.Json().BadRequest(err) + return + } + + err = c.store.UpdateCategory(category) + if err != nil { + response.Json().ServerError(errors.New("Unable to update this category")) + return + } + + response.Json().Created(category) +} + +// GetCategories is the API handler to get a list of categories for a given user. +func (c *Controller) GetCategories(ctx *core.Context, request *core.Request, response *core.Response) { + categories, err := c.store.GetCategories(ctx.GetUserID()) + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch categories")) + return + } + + response.Json().Standard(categories) +} + +// RemoveCategory is the API handler to remove a category. +func (c *Controller) RemoveCategory(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + categoryID, err := request.GetIntegerParam("categoryID") + if err != nil { + response.Json().BadRequest(err) + return + } + + if !c.store.CategoryExists(userID, categoryID) { + response.Json().NotFound(errors.New("Category not found")) + return + } + + if err := c.store.RemoveCategory(userID, categoryID); err != nil { + response.Json().ServerError(errors.New("Unable to remove this category")) + return + } + + response.Json().NoContent() +} diff --git a/server/api/controller/controller.go b/server/api/controller/controller.go new file mode 100644 index 00000000..629d71ac --- /dev/null +++ b/server/api/controller/controller.go @@ -0,0 +1,21 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "github.com/miniflux/miniflux2/reader/feed" + "github.com/miniflux/miniflux2/storage" +) + +// Controller holds all handlers for the API. +type Controller struct { + store *storage.Storage + feedHandler *feed.Handler +} + +// NewController creates a new controller. 
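// Every handler in this package follows the shape of the category handlers
// above: a method on *Controller taking (*core.Context, *core.Request,
// *core.Response), reading parameters or a JSON payload, and replying through
// response.Json(). A hypothetical handler written in that style (the store
// call is assumed and not part of this commit):
//
//	func (c *Controller) GetCategory(ctx *core.Context, request *core.Request, response *core.Response) {
//		categoryID, err := request.GetIntegerParam("categoryID")
//		if err != nil {
//			response.Json().BadRequest(err)
//			return
//		}
//		// ... fetch the category for ctx.GetUserID() via c.store and reply with response.Json().Standard(...)
//	}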
+func NewController(store *storage.Storage, feedHandler *feed.Handler) *Controller { + return &Controller{store: store, feedHandler: feedHandler} +} diff --git a/server/api/controller/entry.go b/server/api/controller/entry.go new file mode 100644 index 00000000..92420e0a --- /dev/null +++ b/server/api/controller/entry.go @@ -0,0 +1,156 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/api/payload" + "github.com/miniflux/miniflux2/server/core" +) + +// GetEntry is the API handler to get a single feed entry. +func (c *Controller) GetEntry(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Json().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(userID, ctx.GetUserTimezone()) + builder.WithFeedID(feedID) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch this entry from the database")) + return + } + + if entry == nil { + response.Json().NotFound(errors.New("Entry not found")) + return + } + + response.Json().Standard(entry) +} + +// GetFeedEntries is the API handler to get all feed entries. +func (c *Controller) GetFeedEntries(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + status := request.GetQueryStringParam("status", "") + if status != "" { + if err := model.ValidateEntryStatus(status); err != nil { + response.Json().BadRequest(err) + return + } + } + + order := request.GetQueryStringParam("order", "id") + if err := model.ValidateEntryOrder(order); err != nil { + response.Json().BadRequest(err) + return + } + + direction := request.GetQueryStringParam("direction", "desc") + if err := model.ValidateDirection(direction); err != nil { + response.Json().BadRequest(err) + return + } + + limit := request.GetQueryIntegerParam("limit", 100) + offset := request.GetQueryIntegerParam("offset", 0) + + builder := c.store.GetEntryQueryBuilder(userID, ctx.GetUserTimezone()) + builder.WithFeedID(feedID) + builder.WithStatus(status) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + builder.WithOffset(offset) + builder.WithLimit(limit) + + entries, err := builder.GetEntries() + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch the list of entries")) + return + } + + count, err := builder.CountEntries() + if err != nil { + response.Json().ServerError(errors.New("Unable to count the number of entries")) + return + } + + response.Json().Standard(&payload.EntriesResponse{Total: count, Entries: entries}) +} + +// SetEntryStatus is the API handler to change the status of an entry. 
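// Note on GetFeedEntries above: the "order" and "direction" query parameters
// are validated but never passed to the query builder, which always receives
// model.DefaultSortingOrder and model.DefaultSortingDirection. If the intent
// is to honor the client's values, the builder calls would presumably read:
//
//	builder.WithOrder(order)
//	builder.WithDirection(direction)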
+func (c *Controller) SetEntryStatus(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Json().BadRequest(err) + return + } + + status, err := payload.DecodeEntryStatusPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(errors.New("Invalid JSON payload")) + return + } + + if err := model.ValidateEntryStatus(status); err != nil { + response.Json().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(userID, ctx.GetUserTimezone()) + builder.WithFeedID(feedID) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch this entry from the database")) + return + } + + if entry == nil { + response.Json().NotFound(errors.New("Entry not found")) + return + } + + if err := c.store.SetEntriesStatus(userID, []int64{entry.ID}, status); err != nil { + response.Json().ServerError(errors.New("Unable to change entry status")) + return + } + + entry, err = builder.GetEntry() + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch this entry from the database")) + return + } + + response.Json().Standard(entry) +} diff --git a/server/api/controller/feed.go b/server/api/controller/feed.go new file mode 100644 index 00000000..6b76fec6 --- /dev/null +++ b/server/api/controller/feed.go @@ -0,0 +1,138 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "errors" + "github.com/miniflux/miniflux2/server/api/payload" + "github.com/miniflux/miniflux2/server/core" +) + +// CreateFeed is the API handler to create a new feed. +func (c *Controller) CreateFeed(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedURL, categoryID, err := payload.DecodeFeedCreationPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + feed, err := c.feedHandler.CreateFeed(userID, categoryID, feedURL) + if err != nil { + response.Json().ServerError(errors.New("Unable to create this feed")) + return + } + + response.Json().Created(feed) +} + +// RefreshFeed is the API handler to refresh a feed. +func (c *Controller) RefreshFeed(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + err = c.feedHandler.RefreshFeed(userID, feedID) + if err != nil { + response.Json().ServerError(errors.New("Unable to refresh this feed")) + return + } + + response.Json().NoContent() +} + +// UpdateFeed is the API handler that is used to update a feed. 
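// The body consumed by SetEntryStatus above is decoded by
// payload.DecodeEntryStatusPayload (defined later in this commit) and carries
// a single "status" field; the accepted values are enforced by
// model.ValidateEntryStatus, which is outside this diff. "read" below is only
// a plausible example value:
//
//	status, err := payload.DecodeEntryStatusPayload(strings.NewReader(`{"status": "read"}`))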
+func (c *Controller) UpdateFeed(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + newFeed, err := payload.DecodeFeedModificationPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + originalFeed, err := c.store.GetFeedById(userID, feedID) + if err != nil { + response.Json().NotFound(errors.New("Unable to find this feed")) + return + } + + if originalFeed == nil { + response.Json().NotFound(errors.New("Feed not found")) + return + } + + originalFeed.Merge(newFeed) + if err := c.store.UpdateFeed(originalFeed); err != nil { + response.Json().ServerError(errors.New("Unable to update this feed")) + return + } + + response.Json().Created(originalFeed) +} + +// GetFeeds is the API handler that get all feeds that belongs to the given user. +func (c *Controller) GetFeeds(ctx *core.Context, request *core.Request, response *core.Response) { + feeds, err := c.store.GetFeeds(ctx.GetUserID()) + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch feeds from the database")) + return + } + + response.Json().Standard(feeds) +} + +// GetFeed is the API handler to get a feed. +func (c *Controller) GetFeed(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + feed, err := c.store.GetFeedById(userID, feedID) + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch this feed")) + return + } + + if feed == nil { + response.Json().NotFound(errors.New("Feed not found")) + return + } + + response.Json().Standard(feed) +} + +// RemoveFeed is the API handler to remove a feed. +func (c *Controller) RemoveFeed(ctx *core.Context, request *core.Request, response *core.Response) { + userID := ctx.GetUserID() + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Json().BadRequest(err) + return + } + + if !c.store.FeedExists(userID, feedID) { + response.Json().NotFound(errors.New("Feed not found")) + return + } + + if err := c.store.RemoveFeed(userID, feedID); err != nil { + response.Json().ServerError(errors.New("Unable to remove this feed")) + return + } + + response.Json().NoContent() +} diff --git a/server/api/controller/subscription.go b/server/api/controller/subscription.go new file mode 100644 index 00000000..cb442d95 --- /dev/null +++ b/server/api/controller/subscription.go @@ -0,0 +1,35 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "errors" + "fmt" + "github.com/miniflux/miniflux2/reader/subscription" + "github.com/miniflux/miniflux2/server/api/payload" + "github.com/miniflux/miniflux2/server/core" +) + +// GetSubscriptions is the API handler to find subscriptions. 
+func (c *Controller) GetSubscriptions(ctx *core.Context, request *core.Request, response *core.Response) { + websiteURL, err := payload.DecodeURLPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + subscriptions, err := subscription.FindSubscriptions(websiteURL) + if err != nil { + response.Json().ServerError(errors.New("Unable to discover subscriptions")) + return + } + + if subscriptions == nil { + response.Json().NotFound(fmt.Errorf("No subscription found")) + return + } + + response.Json().Standard(subscriptions) +} diff --git a/server/api/controller/user.go b/server/api/controller/user.go new file mode 100644 index 00000000..c8276b3c --- /dev/null +++ b/server/api/controller/user.go @@ -0,0 +1,163 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package api + +import ( + "errors" + "github.com/miniflux/miniflux2/server/api/payload" + "github.com/miniflux/miniflux2/server/core" +) + +// CreateUser is the API handler to create a new user. +func (c *Controller) CreateUser(ctx *core.Context, request *core.Request, response *core.Response) { + if !ctx.IsAdminUser() { + response.Json().Forbidden() + return + } + + user, err := payload.DecodeUserPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + if err := user.ValidateUserCreation(); err != nil { + response.Json().BadRequest(err) + return + } + + if c.store.UserExists(user.Username) { + response.Json().BadRequest(errors.New("This user already exists")) + return + } + + err = c.store.CreateUser(user) + if err != nil { + response.Json().ServerError(errors.New("Unable to create this user")) + return + } + + user.Password = "" + response.Json().Created(user) +} + +// UpdateUser is the API handler to update the given user. +func (c *Controller) UpdateUser(ctx *core.Context, request *core.Request, response *core.Response) { + if !ctx.IsAdminUser() { + response.Json().Forbidden() + return + } + + userID, err := request.GetIntegerParam("userID") + if err != nil { + response.Json().BadRequest(err) + return + } + + user, err := payload.DecodeUserPayload(request.GetBody()) + if err != nil { + response.Json().BadRequest(err) + return + } + + if err := user.ValidateUserModification(); err != nil { + response.Json().BadRequest(err) + return + } + + originalUser, err := c.store.GetUserById(userID) + if err != nil { + response.Json().BadRequest(errors.New("Unable to fetch this user from the database")) + return + } + + if originalUser == nil { + response.Json().NotFound(errors.New("User not found")) + return + } + + originalUser.Merge(user) + if err = c.store.UpdateUser(originalUser); err != nil { + response.Json().ServerError(errors.New("Unable to update this user")) + return + } + + response.Json().Created(originalUser) +} + +// GetUsers is the API handler to get the list of users. +func (c *Controller) GetUsers(ctx *core.Context, request *core.Request, response *core.Response) { + if !ctx.IsAdminUser() { + response.Json().Forbidden() + return + } + + users, err := c.store.GetUsers() + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch the list of users")) + return + } + + response.Json().Standard(users) +} + +// GetUser is the API handler to fetch the given user. 
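// The body expected by GetSubscriptions above is decoded by
// payload.DecodeURLPayload (defined later in this commit): a JSON object with
// a single "url" field. For illustration:
//
//	websiteURL, err := payload.DecodeURLPayload(strings.NewReader(`{"url": "https://example.org/"}`))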
+func (c *Controller) GetUser(ctx *core.Context, request *core.Request, response *core.Response) { + if !ctx.IsAdminUser() { + response.Json().Forbidden() + return + } + + userID, err := request.GetIntegerParam("userID") + if err != nil { + response.Json().BadRequest(err) + return + } + + user, err := c.store.GetUserById(userID) + if err != nil { + response.Json().BadRequest(errors.New("Unable to fetch this user from the database")) + return + } + + if user == nil { + response.Json().NotFound(errors.New("User not found")) + return + } + + response.Json().Standard(user) +} + +// RemoveUser is the API handler to remove an existing user. +func (c *Controller) RemoveUser(ctx *core.Context, request *core.Request, response *core.Response) { + if !ctx.IsAdminUser() { + response.Json().Forbidden() + return + } + + userID, err := request.GetIntegerParam("userID") + if err != nil { + response.Json().BadRequest(err) + return + } + + user, err := c.store.GetUserById(userID) + if err != nil { + response.Json().ServerError(errors.New("Unable to fetch this user from the database")) + return + } + + if user == nil { + response.Json().NotFound(errors.New("User not found")) + return + } + + if err := c.store.RemoveUser(user.ID); err != nil { + response.Json().BadRequest(errors.New("Unable to remove this user from the database")) + return + } + + response.Json().NoContent() +} diff --git a/server/api/payload/payload.go b/server/api/payload/payload.go new file mode 100644 index 00000000..e26f9fc0 --- /dev/null +++ b/server/api/payload/payload.go @@ -0,0 +1,93 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package payload + +import ( + "encoding/json" + "fmt" + "github.com/miniflux/miniflux2/model" + "io" +) + +type EntriesResponse struct { + Total int `json:"total"` + Entries model.Entries `json:"entries"` +} + +func DecodeUserPayload(data io.Reader) (*model.User, error) { + var user model.User + + decoder := json.NewDecoder(data) + if err := decoder.Decode(&user); err != nil { + return nil, fmt.Errorf("Unable to decode user JSON object: %v", err) + } + + return &user, nil +} + +func DecodeURLPayload(data io.Reader) (string, error) { + type payload struct { + URL string `json:"url"` + } + + var p payload + decoder := json.NewDecoder(data) + if err := decoder.Decode(&p); err != nil { + return "", fmt.Errorf("invalid JSON payload: %v", err) + } + + return p.URL, nil +} + +func DecodeEntryStatusPayload(data io.Reader) (string, error) { + type payload struct { + Status string `json:"status"` + } + + var p payload + decoder := json.NewDecoder(data) + if err := decoder.Decode(&p); err != nil { + return "", fmt.Errorf("invalid JSON payload: %v", err) + } + + return p.Status, nil +} + +func DecodeFeedCreationPayload(data io.Reader) (string, int64, error) { + type payload struct { + FeedURL string `json:"feed_url"` + CategoryID int64 `json:"category_id"` + } + + var p payload + decoder := json.NewDecoder(data) + if err := decoder.Decode(&p); err != nil { + return "", 0, fmt.Errorf("invalid JSON payload: %v", err) + } + + return p.FeedURL, p.CategoryID, nil +} + +func DecodeFeedModificationPayload(data io.Reader) (*model.Feed, error) { + var feed model.Feed + + decoder := json.NewDecoder(data) + if err := decoder.Decode(&feed); err != nil { + return nil, fmt.Errorf("Unable to decode feed JSON object: %v", err) + } + + return &feed, nil +} + +func DecodeCategoryPayload(data io.Reader) 
(*model.Category, error) { + var category model.Category + + decoder := json.NewDecoder(data) + if err := decoder.Decode(&category); err != nil { + return nil, fmt.Errorf("Unable to decode category JSON object: %v", err) + } + + return &category, nil +} diff --git a/server/core/context.go b/server/core/context.go new file mode 100644 index 00000000..c9d2dc2e --- /dev/null +++ b/server/core/context.go @@ -0,0 +1,99 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package core + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/route" + "github.com/miniflux/miniflux2/storage" + "log" + "net/http" + + "github.com/gorilla/mux" +) + +// Context contains helper functions related to the current request. +type Context struct { + writer http.ResponseWriter + request *http.Request + store *storage.Storage + router *mux.Router + user *model.User +} + +// IsAdminUser checks if the logged user is administrator. +func (c *Context) IsAdminUser() bool { + if v := c.request.Context().Value("IsAdminUser"); v != nil { + return v.(bool) + } + return false +} + +// GetUserTimezone returns the timezone used by the logged user. +func (c *Context) GetUserTimezone() string { + if v := c.request.Context().Value("UserTimezone"); v != nil { + return v.(string) + } + return "UTC" +} + +// IsAuthenticated returns a boolean if the user is authenticated. +func (c *Context) IsAuthenticated() bool { + if v := c.request.Context().Value("IsAuthenticated"); v != nil { + return v.(bool) + } + return false +} + +// GetUserID returns the UserID of the logged user. +func (c *Context) GetUserID() int64 { + if v := c.request.Context().Value("UserId"); v != nil { + return v.(int64) + } + return 0 +} + +// GetLoggedUser returns all properties related to the logged user. +func (c *Context) GetLoggedUser() *model.User { + if c.user == nil { + var err error + c.user, err = c.store.GetUserById(c.GetUserID()) + if err != nil { + log.Fatalln(err) + } + + if c.user == nil { + log.Fatalln("Unable to find user from context") + } + } + + return c.user +} + +// GetUserLanguage get the locale used by the current logged user. +func (c *Context) GetUserLanguage() string { + user := c.GetLoggedUser() + return user.Language +} + +// GetCsrfToken returns the current CSRF token. +func (c *Context) GetCsrfToken() string { + if v := c.request.Context().Value("CsrfToken"); v != nil { + return v.(string) + } + + log.Println("No CSRF token in context!") + return "" +} + +// GetRoute returns the path for the given arguments. +func (c *Context) GetRoute(name string, args ...interface{}) string { + return route.GetRoute(c.router, name, args...) +} + +// NewContext creates a new Context. +func NewContext(w http.ResponseWriter, r *http.Request, store *storage.Storage, router *mux.Router) *Context { + return &Context{writer: w, request: r, store: store, router: router} +} diff --git a/server/core/handler.go b/server/core/handler.go new file mode 100644 index 00000000..43205647 --- /dev/null +++ b/server/core/handler.go @@ -0,0 +1,57 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+
+package core
+
+import (
+	"github.com/miniflux/miniflux2/helper"
+	"github.com/miniflux/miniflux2/locale"
+	"github.com/miniflux/miniflux2/server/middleware"
+	"github.com/miniflux/miniflux2/server/template"
+	"github.com/miniflux/miniflux2/storage"
+	"log"
+	"net/http"
+	"time"
+
+	"github.com/gorilla/mux"
+)
+
+type HandlerFunc func(ctx *Context, request *Request, response *Response)
+
+type Handler struct {
+	store      *storage.Storage
+	translator *locale.Translator
+	template   *template.TemplateEngine
+	router     *mux.Router
+	middleware *middleware.MiddlewareChain
+}
+
+func (h *Handler) Use(f HandlerFunc) http.Handler {
+	return h.middleware.WrapFunc(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		defer helper.ExecutionTime(time.Now(), r.URL.Path)
+		log.Println(r.Method, r.URL.Path)
+
+		ctx := NewContext(w, r, h.store, h.router)
+		request := NewRequest(w, r)
+		response := NewResponse(w, r, h.template)
+
+		if ctx.IsAuthenticated() {
+			h.template.SetLanguage(ctx.GetUserLanguage())
+		} else {
+			h.template.SetLanguage("en_US")
+		}
+
+		f(ctx, request, response)
+	}))
+}
+
+func NewHandler(store *storage.Storage, router *mux.Router, template *template.TemplateEngine, translator *locale.Translator, middleware *middleware.MiddlewareChain) *Handler {
+	return &Handler{
+		store:      store,
+		translator: translator,
+		router:     router,
+		template:   template,
+		middleware: middleware,
+	}
+}
diff --git a/server/core/html_response.go b/server/core/html_response.go
new file mode 100644
index 00000000..9f493d20
--- /dev/null
+++ b/server/core/html_response.go
@@ -0,0 +1,58 @@
+// Copyright 2017 Frédéric Guillot. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+	"github.com/miniflux/miniflux2/server/template"
+	"log"
+	"net/http"
+)
+
+type HtmlResponse struct {
+	writer   http.ResponseWriter
+	request  *http.Request
+	template *template.TemplateEngine
+}
+
+func (h *HtmlResponse) Render(template string, args map[string]interface{}) {
+	h.writer.Header().Set("Content-Type", "text/html; charset=utf-8")
+	h.template.Execute(h.writer, template, args)
+}
+
+func (h *HtmlResponse) ServerError(err error) {
+	h.writer.Header().Set("Content-Type", "text/html; charset=utf-8")
+	h.writer.WriteHeader(http.StatusInternalServerError)
+
+	if err != nil {
+		log.Println(err)
+		h.writer.Write([]byte("Internal Server Error: " + err.Error()))
+	} else {
+		h.writer.Write([]byte("Internal Server Error"))
+	}
+}
+
+func (h *HtmlResponse) BadRequest(err error) {
+	h.writer.Header().Set("Content-Type", "text/html; charset=utf-8")
+	h.writer.WriteHeader(http.StatusBadRequest)
+
+	if err != nil {
+		log.Println(err)
+		h.writer.Write([]byte("Bad Request: " + err.Error()))
+	} else {
+		h.writer.Write([]byte("Bad Request"))
+	}
+}
+
+func (h *HtmlResponse) NotFound() {
+	h.writer.Header().Set("Content-Type", "text/html; charset=utf-8")
+	h.writer.WriteHeader(http.StatusNotFound)
+	h.writer.Write([]byte("Page Not Found"))
+}
+
+func (h *HtmlResponse) Forbidden() {
+	h.writer.Header().Set("Content-Type", "text/html; charset=utf-8")
+	h.writer.WriteHeader(http.StatusForbidden)
+	h.writer.Write([]byte("Access Forbidden"))
+}
diff --git a/server/core/json_response.go b/server/core/json_response.go
new file mode 100644
index 00000000..51a9ede8
--- /dev/null
+++ b/server/core/json_response.go
@@ -0,0 +1,94 @@
+// Copyright 2017 Frédéric Guillot. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+	"encoding/json"
+	"errors"
+	"log"
+	"net/http"
+)
+
+type JsonResponse struct {
+	writer  http.ResponseWriter
+	request *http.Request
+}
+
+func (j *JsonResponse) Standard(v interface{}) {
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusOK)
+	j.writer.Write(j.toJSON(v))
+}
+
+func (j *JsonResponse) Created(v interface{}) {
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusCreated)
+	j.writer.Write(j.toJSON(v))
+}
+
+func (j *JsonResponse) NoContent() {
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusNoContent)
+}
+
+func (j *JsonResponse) BadRequest(err error) {
+	log.Println("[API:BadRequest]", err)
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusBadRequest)
+
+	if err != nil {
+		j.writer.Write(j.encodeError(err))
+	}
+}
+
+func (j *JsonResponse) NotFound(err error) {
+	log.Println("[API:NotFound]", err)
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusNotFound)
+	j.writer.Write(j.encodeError(err))
+}
+
+func (j *JsonResponse) ServerError(err error) {
+	log.Println("[API:ServerError]", err)
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusInternalServerError)
+	j.writer.Write(j.encodeError(err))
+}
+
+func (j *JsonResponse) Forbidden() {
+	log.Println("[API:Forbidden]")
+	j.commonHeaders()
+	j.writer.WriteHeader(http.StatusForbidden)
+	j.writer.Write(j.encodeError(errors.New("Access Forbidden")))
+}
+
+func (j *JsonResponse) commonHeaders() {
+	j.writer.Header().Set("Accept", "application/json")
+	j.writer.Header().Set("Content-Type", "application/json")
+}
+
+func (j *JsonResponse) encodeError(err error) []byte {
+	type errorMsg struct {
+		ErrorMessage string `json:"error_message"`
+	}
+
+	tmp := errorMsg{ErrorMessage: err.Error()}
+	data, err := json.Marshal(tmp)
+	if err != nil {
+		log.Println("encodeError:", err)
+	}
+
+	return data
+}
+
+func (j *JsonResponse) toJSON(v interface{}) []byte {
+	b, err := json.Marshal(v)
+	if err != nil {
+		log.Println("Unable to convert interface to JSON:", err)
+		return []byte("")
+	}
+
+	return b
+}
diff --git a/server/core/request.go b/server/core/request.go
new file mode 100644
index 00000000..189e2498
--- /dev/null
+++ b/server/core/request.go
@@ -0,0 +1,108 @@
+// Copyright 2017 Frédéric Guillot. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+ +package core + +import ( + "fmt" + "io" + "log" + "mime/multipart" + "net/http" + "strconv" + + "github.com/gorilla/mux" +) + +type Request struct { + writer http.ResponseWriter + request *http.Request +} + +func (r *Request) GetRequest() *http.Request { + return r.request +} + +func (r *Request) GetBody() io.ReadCloser { + return r.request.Body +} + +func (r *Request) GetHeaders() http.Header { + return r.request.Header +} + +func (r *Request) GetScheme() string { + return r.request.URL.Scheme +} + +func (r *Request) GetFile(name string) (multipart.File, *multipart.FileHeader, error) { + return r.request.FormFile(name) +} + +func (r *Request) IsHTTPS() bool { + return r.request.URL.Scheme == "https" +} + +func (r *Request) GetCookie(name string) string { + cookie, err := r.request.Cookie(name) + if err == http.ErrNoCookie { + return "" + } + + return cookie.Value +} + +func (r *Request) GetIntegerParam(param string) (int64, error) { + vars := mux.Vars(r.request) + value, err := strconv.Atoi(vars[param]) + if err != nil { + log.Println(err) + return 0, fmt.Errorf("%s parameter is not an integer", param) + } + + if value < 0 { + return 0, nil + } + + return int64(value), nil +} + +func (r *Request) GetStringParam(param, defaultValue string) string { + vars := mux.Vars(r.request) + value := vars[param] + if value == "" { + value = defaultValue + } + return value +} + +func (r *Request) GetQueryStringParam(param, defaultValue string) string { + value := r.request.URL.Query().Get(param) + if value == "" { + value = defaultValue + } + return value +} + +func (r *Request) GetQueryIntegerParam(param string, defaultValue int) int { + value := r.request.URL.Query().Get(param) + if value == "" { + return defaultValue + } + + val, err := strconv.Atoi(value) + if err != nil { + return defaultValue + } + + if val < 0 { + return defaultValue + } + + return val +} + +func NewRequest(w http.ResponseWriter, r *http.Request) *Request { + return &Request{writer: w, request: r} +} diff --git a/server/core/response.go b/server/core/response.go new file mode 100644 index 00000000..4acbe95c --- /dev/null +++ b/server/core/response.go @@ -0,0 +1,63 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package core + +import ( + "github.com/miniflux/miniflux2/server/template" + "net/http" + "time" +) + +type Response struct { + writer http.ResponseWriter + request *http.Request + template *template.TemplateEngine +} + +func (r *Response) SetCookie(cookie *http.Cookie) { + http.SetCookie(r.writer, cookie) +} + +func (r *Response) Json() *JsonResponse { + r.commonHeaders() + return &JsonResponse{writer: r.writer, request: r.request} +} + +func (r *Response) Html() *HtmlResponse { + r.commonHeaders() + return &HtmlResponse{writer: r.writer, request: r.request, template: r.template} +} + +func (r *Response) Xml() *XmlResponse { + r.commonHeaders() + return &XmlResponse{writer: r.writer, request: r.request} +} + +func (r *Response) Redirect(path string) { + http.Redirect(r.writer, r.request, path, http.StatusFound) +} + +func (r *Response) Cache(mime_type, etag string, content []byte, duration time.Duration) { + r.writer.Header().Set("Content-Type", mime_type) + r.writer.Header().Set("Etag", etag) + r.writer.Header().Set("Cache-Control", "public") + r.writer.Header().Set("Expires", time.Now().Add(duration).Format(time.RFC1123)) + + if etag == r.request.Header.Get("If-None-Match") { + r.writer.WriteHeader(http.StatusNotModified) + } else { + r.writer.Write(content) + } +} + +func (r *Response) commonHeaders() { + r.writer.Header().Set("X-XSS-Protection", "1; mode=block") + r.writer.Header().Set("X-Content-Type-Options", "nosniff") + r.writer.Header().Set("X-Frame-Options", "DENY") +} + +func NewResponse(w http.ResponseWriter, r *http.Request, template *template.TemplateEngine) *Response { + return &Response{writer: w, request: r, template: template} +} diff --git a/server/core/xml_response.go b/server/core/xml_response.go new file mode 100644 index 00000000..6ffd5c9d --- /dev/null +++ b/server/core/xml_response.go @@ -0,0 +1,21 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package core + +import ( + "fmt" + "net/http" +) + +type XmlResponse struct { + writer http.ResponseWriter + request *http.Request +} + +func (x *XmlResponse) Download(filename, data string) { + x.writer.Header().Set("Content-Type", "text/xml") + x.writer.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filename)) + x.writer.Write([]byte(data)) +} diff --git a/server/middleware/basic_auth.go b/server/middleware/basic_auth.go new file mode 100644 index 00000000..73dfb98c --- /dev/null +++ b/server/middleware/basic_auth.go @@ -0,0 +1,61 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package middleware + +import ( + "context" + "github.com/miniflux/miniflux2/storage" + "log" + "net/http" +) + +type BasicAuthMiddleware struct { + store *storage.Storage +} + +func (b *BasicAuthMiddleware) Handler(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("WWW-Authenticate", `Basic realm="Restricted"`) + errorResponse := `{"error_message": "Not Authorized"}` + + username, password, authOK := r.BasicAuth() + if !authOK { + log.Println("[Middleware:BasicAuth] No authentication headers sent") + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(errorResponse)) + return + } + + if err := b.store.CheckPassword(username, password); err != nil { + log.Println("[Middleware:BasicAuth] Invalid username or password:", username) + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(errorResponse)) + return + } + + user, err := b.store.GetUserByUsername(username) + if err != nil || user == nil { + log.Println("[Middleware:BasicAuth] User not found:", username) + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(errorResponse)) + return + } + + log.Println("[Middleware:BasicAuth] User authenticated:", username) + b.store.SetLastLogin(user.ID) + + ctx := r.Context() + ctx = context.WithValue(ctx, "UserId", user.ID) + ctx = context.WithValue(ctx, "UserTimezone", user.Timezone) + ctx = context.WithValue(ctx, "IsAdminUser", user.IsAdmin) + ctx = context.WithValue(ctx, "IsAuthenticated", true) + + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +func NewBasicAuthMiddleware(s *storage.Storage) *BasicAuthMiddleware { + return &BasicAuthMiddleware{store: s} +} diff --git a/server/middleware/csrf.go b/server/middleware/csrf.go new file mode 100644 index 00000000..74736b57 --- /dev/null +++ b/server/middleware/csrf.go @@ -0,0 +1,48 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package middleware + +import ( + "context" + "github.com/miniflux/miniflux2/helper" + "log" + "net/http" +) + +func Csrf(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var csrfToken string + + csrfCookie, err := r.Cookie("csrfToken") + if err == http.ErrNoCookie || csrfCookie.Value == "" { + csrfToken = helper.GenerateRandomString(64) + cookie := &http.Cookie{ + Name: "csrfToken", + Value: csrfToken, + Path: "/", + Secure: r.URL.Scheme == "https", + HttpOnly: true, + } + + http.SetCookie(w, cookie) + } else { + csrfToken = csrfCookie.Value + } + + ctx := r.Context() + ctx = context.WithValue(ctx, "CsrfToken", csrfToken) + + w.Header().Add("Vary", "Cookie") + isTokenValid := csrfToken == r.FormValue("csrf") || csrfToken == r.Header.Get("X-Csrf-Token") + + if r.Method == "POST" && !isTokenValid { + log.Println("[Middleware:CSRF] Invalid or missing CSRF token!") + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte("Invalid or missing CSRF token!")) + } else { + next.ServeHTTP(w, r.WithContext(ctx)) + } + }) +} diff --git a/server/middleware/middleware.go b/server/middleware/middleware.go new file mode 100644 index 00000000..cab01c86 --- /dev/null +++ b/server/middleware/middleware.go @@ -0,0 +1,31 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package middleware + +import ( + "net/http" +) + +type Middleware func(http.Handler) http.Handler + +type MiddlewareChain struct { + middlewares []Middleware +} + +func (m *MiddlewareChain) Wrap(h http.Handler) http.Handler { + for i := range m.middlewares { + h = m.middlewares[len(m.middlewares)-1-i](h) + } + + return h +} + +func (m *MiddlewareChain) WrapFunc(fn http.HandlerFunc) http.Handler { + return m.Wrap(fn) +} + +func NewMiddlewareChain(middlewares ...Middleware) *MiddlewareChain { + return &MiddlewareChain{append(([]Middleware)(nil), middlewares...)} +} diff --git a/server/middleware/session.go b/server/middleware/session.go new file mode 100644 index 00000000..54559721 --- /dev/null +++ b/server/middleware/session.go @@ -0,0 +1,72 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package middleware + +import ( + "context" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/route" + "github.com/miniflux/miniflux2/storage" + "log" + "net/http" + + "github.com/gorilla/mux" +) + +type SessionMiddleware struct { + store *storage.Storage + router *mux.Router +} + +func (s *SessionMiddleware) Handler(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + session := s.getSessionFromCookie(r) + + if session == nil { + log.Println("[Middleware:Session] Session not found") + if s.isPublicRoute(r) { + next.ServeHTTP(w, r) + } else { + http.Redirect(w, r, route.GetRoute(s.router, "login"), http.StatusFound) + } + } else { + log.Println("[Middleware:Session]", session) + ctx := r.Context() + ctx = context.WithValue(ctx, "UserId", session.UserID) + ctx = context.WithValue(ctx, "IsAuthenticated", true) + + next.ServeHTTP(w, r.WithContext(ctx)) + } + }) +} + +func (s *SessionMiddleware) isPublicRoute(r *http.Request) bool { + route := mux.CurrentRoute(r) + switch route.GetName() { + case "login", "checkLogin", "stylesheet", "javascript": + return true + default: + return false + } +} + +func (s *SessionMiddleware) getSessionFromCookie(r *http.Request) *model.Session { + sessionCookie, err := r.Cookie("sessionID") + if err == http.ErrNoCookie { + return nil + } + + session, err := s.store.GetSessionByToken(sessionCookie.Value) + if err != nil { + log.Println(err) + return nil + } + + return session +} + +func NewSessionMiddleware(s *storage.Storage, r *mux.Router) *SessionMiddleware { + return &SessionMiddleware{store: s, router: r} +} diff --git a/server/route/route.go b/server/route/route.go new file mode 100644 index 00000000..885f0bc4 --- /dev/null +++ b/server/route/route.go @@ -0,0 +1,37 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package route + +import ( + "log" + "strconv" + + "github.com/gorilla/mux" +) + +func GetRoute(router *mux.Router, name string, args ...interface{}) string { + route := router.Get(name) + if route == nil { + log.Fatalln("Route not found:", name) + } + + var pairs []string + for _, param := range args { + switch param.(type) { + case string: + pairs = append(pairs, param.(string)) + case int64: + val := param.(int64) + pairs = append(pairs, strconv.FormatInt(val, 10)) + } + } + + result, err := route.URLPath(pairs...) 
+	if err != nil {
+		log.Fatalln(err)
+	}
+
+	return result.String()
+}
diff --git a/server/routes.go b/server/routes.go
new file mode 100644
index 00000000..0c5ec65f
--- /dev/null
+++ b/server/routes.go
@@ -0,0 +1,132 @@
+// Copyright 2017 Frédéric Guillot. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package server
+
+import (
+	"github.com/miniflux/miniflux2/locale"
+	"github.com/miniflux/miniflux2/reader/feed"
+	"github.com/miniflux/miniflux2/reader/opml"
+	api_controller "github.com/miniflux/miniflux2/server/api/controller"
+	"github.com/miniflux/miniflux2/server/core"
+	"github.com/miniflux/miniflux2/server/middleware"
+	"github.com/miniflux/miniflux2/server/template"
+	ui_controller "github.com/miniflux/miniflux2/server/ui/controller"
+	"github.com/miniflux/miniflux2/storage"
+	"net/http"
+
+	"github.com/gorilla/mux"
+)
+
+func getRoutes(store *storage.Storage, feedHandler *feed.Handler) *mux.Router {
+	router := mux.NewRouter()
+	translator := locale.Load()
+	templateEngine := template.NewTemplateEngine(router, translator)
+
+	apiController := api_controller.NewController(store, feedHandler)
+	uiController := ui_controller.NewController(store, feedHandler, opml.NewOpmlHandler(store))
+
+	apiHandler := core.NewHandler(store, router, templateEngine, translator, middleware.NewMiddlewareChain(
+		middleware.NewBasicAuthMiddleware(store).Handler,
+	))
+
+	uiHandler := core.NewHandler(store, router, templateEngine, translator, middleware.NewMiddlewareChain(
+		middleware.NewSessionMiddleware(store, router).Handler,
+		middleware.Csrf,
+	))
+
+	router.Handle("/v1/users", apiHandler.Use(apiController.CreateUser)).Methods("POST")
+	router.Handle("/v1/users", apiHandler.Use(apiController.GetUsers)).Methods("GET")
+	router.Handle("/v1/users/{userID}", apiHandler.Use(apiController.GetUser)).Methods("GET")
+	router.Handle("/v1/users/{userID}", apiHandler.Use(apiController.UpdateUser)).Methods("PUT")
+	router.Handle("/v1/users/{userID}", apiHandler.Use(apiController.RemoveUser)).Methods("DELETE")
+
+	router.Handle("/v1/categories", apiHandler.Use(apiController.CreateCategory)).Methods("POST")
+	router.Handle("/v1/categories", apiHandler.Use(apiController.GetCategories)).Methods("GET")
+	router.Handle("/v1/categories/{categoryID}", apiHandler.Use(apiController.UpdateCategory)).Methods("PUT")
+	router.Handle("/v1/categories/{categoryID}", apiHandler.Use(apiController.RemoveCategory)).Methods("DELETE")
+
+	router.Handle("/v1/discover", apiHandler.Use(apiController.GetSubscriptions)).Methods("POST")
+
+	router.Handle("/v1/feeds", apiHandler.Use(apiController.CreateFeed)).Methods("POST")
+	router.Handle("/v1/feeds", apiHandler.Use(apiController.GetFeeds)).Methods("GET")
+	router.Handle("/v1/feeds/{feedID}/refresh", apiHandler.Use(apiController.RefreshFeed)).Methods("PUT")
+	router.Handle("/v1/feeds/{feedID}", apiHandler.Use(apiController.GetFeed)).Methods("GET")
+	router.Handle("/v1/feeds/{feedID}", apiHandler.Use(apiController.UpdateFeed)).Methods("PUT")
+	router.Handle("/v1/feeds/{feedID}", apiHandler.Use(apiController.RemoveFeed)).Methods("DELETE")
+
+	router.Handle("/v1/feeds/{feedID}/entries", apiHandler.Use(apiController.GetFeedEntries)).Methods("GET")
+	router.Handle("/v1/feeds/{feedID}/entries/{entryID}", apiHandler.Use(apiController.GetEntry)).Methods("GET")
+	router.Handle("/v1/feeds/{feedID}/entries/{entryID}", apiHandler.Use(apiController.SetEntryStatus)).Methods("PUT")
+
+	
router.Handle("/stylesheets/{name}.css", uiHandler.Use(uiController.Stylesheet)).Name("stylesheet").Methods("GET") + router.Handle("/js", uiHandler.Use(uiController.Javascript)).Name("javascript").Methods("GET") + router.Handle("/favicon.ico", uiHandler.Use(uiController.Favicon)).Name("favicon").Methods("GET") + + router.Handle("/subscribe", uiHandler.Use(uiController.AddSubscription)).Name("addSubscription").Methods("GET") + router.Handle("/subscribe", uiHandler.Use(uiController.SubmitSubscription)).Name("submitSubscription").Methods("POST") + router.Handle("/subscriptions", uiHandler.Use(uiController.ChooseSubscription)).Name("chooseSubscription").Methods("POST") + + router.Handle("/unread", uiHandler.Use(uiController.ShowUnreadPage)).Name("unread").Methods("GET") + router.Handle("/history", uiHandler.Use(uiController.ShowHistoryPage)).Name("history").Methods("GET") + + router.Handle("/feed/{feedID}/refresh", uiHandler.Use(uiController.RefreshFeed)).Name("refreshFeed").Methods("GET") + router.Handle("/feed/{feedID}/edit", uiHandler.Use(uiController.EditFeed)).Name("editFeed").Methods("GET") + router.Handle("/feed/{feedID}/remove", uiHandler.Use(uiController.RemoveFeed)).Name("removeFeed").Methods("GET") + router.Handle("/feed/{feedID}/update", uiHandler.Use(uiController.UpdateFeed)).Name("updateFeed").Methods("POST") + router.Handle("/feed/{feedID}/entries", uiHandler.Use(uiController.ShowFeedEntries)).Name("feedEntries").Methods("GET") + router.Handle("/feeds", uiHandler.Use(uiController.ShowFeedsPage)).Name("feeds").Methods("GET") + + router.Handle("/unread/entry/{entryID}", uiHandler.Use(uiController.ShowUnreadEntry)).Name("unreadEntry").Methods("GET") + router.Handle("/history/entry/{entryID}", uiHandler.Use(uiController.ShowReadEntry)).Name("readEntry").Methods("GET") + router.Handle("/feed/{feedID}/entry/{entryID}", uiHandler.Use(uiController.ShowFeedEntry)).Name("feedEntry").Methods("GET") + router.Handle("/category/{categoryID}/entry/{entryID}", uiHandler.Use(uiController.ShowCategoryEntry)).Name("categoryEntry").Methods("GET") + + router.Handle("/entry/status", uiHandler.Use(uiController.UpdateEntriesStatus)).Name("updateEntriesStatus").Methods("POST") + + router.Handle("/categories", uiHandler.Use(uiController.ShowCategories)).Name("categories").Methods("GET") + router.Handle("/category/create", uiHandler.Use(uiController.CreateCategory)).Name("createCategory").Methods("GET") + router.Handle("/category/save", uiHandler.Use(uiController.SaveCategory)).Name("saveCategory").Methods("POST") + router.Handle("/category/{categoryID}/entries", uiHandler.Use(uiController.ShowCategoryEntries)).Name("categoryEntries").Methods("GET") + router.Handle("/category/{categoryID}/edit", uiHandler.Use(uiController.EditCategory)).Name("editCategory").Methods("GET") + router.Handle("/category/{categoryID}/update", uiHandler.Use(uiController.UpdateCategory)).Name("updateCategory").Methods("POST") + router.Handle("/category/{categoryID}/remove", uiHandler.Use(uiController.RemoveCategory)).Name("removeCategory").Methods("GET") + + router.Handle("/icon/{iconID}", uiHandler.Use(uiController.ShowIcon)).Name("icon").Methods("GET") + router.Handle("/proxy/{encodedURL}", uiHandler.Use(uiController.ImageProxy)).Name("proxy").Methods("GET") + + router.Handle("/users", uiHandler.Use(uiController.ShowUsers)).Name("users").Methods("GET") + router.Handle("/user/create", uiHandler.Use(uiController.CreateUser)).Name("createUser").Methods("GET") + router.Handle("/user/save", 
uiHandler.Use(uiController.SaveUser)).Name("saveUser").Methods("POST") + router.Handle("/users/{userID}/edit", uiHandler.Use(uiController.EditUser)).Name("editUser").Methods("GET") + router.Handle("/users/{userID}/update", uiHandler.Use(uiController.UpdateUser)).Name("updateUser").Methods("POST") + router.Handle("/users/{userID}/remove", uiHandler.Use(uiController.RemoveUser)).Name("removeUser").Methods("GET") + + router.Handle("/about", uiHandler.Use(uiController.AboutPage)).Name("about").Methods("GET") + + router.Handle("/settings", uiHandler.Use(uiController.ShowSettings)).Name("settings").Methods("GET") + router.Handle("/settings", uiHandler.Use(uiController.UpdateSettings)).Name("updateSettings").Methods("POST") + + router.Handle("/sessions", uiHandler.Use(uiController.ShowSessions)).Name("sessions").Methods("GET") + router.Handle("/sessions/{sessionID}/remove", uiHandler.Use(uiController.RemoveSession)).Name("removeSession").Methods("GET") + + router.Handle("/export", uiHandler.Use(uiController.Export)).Name("export").Methods("GET") + router.Handle("/import", uiHandler.Use(uiController.Import)).Name("import").Methods("GET") + router.Handle("/upload", uiHandler.Use(uiController.UploadOPML)).Name("uploadOPML").Methods("POST") + + router.Handle("/login", uiHandler.Use(uiController.CheckLogin)).Name("checkLogin").Methods("POST") + router.Handle("/logout", uiHandler.Use(uiController.Logout)).Name("logout").Methods("GET") + router.Handle("/", uiHandler.Use(uiController.ShowLoginPage)).Name("login").Methods("GET") + + router.HandleFunc("/healthcheck", func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte("OK")) + }) + + router.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/plain") + w.Write([]byte("User-agent: *\nDisallow: /")) + }) + + return router +} diff --git a/server/server.go b/server/server.go new file mode 100644 index 00000000..ec32329f --- /dev/null +++ b/server/server.go @@ -0,0 +1,33 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package server + +import ( + "github.com/miniflux/miniflux2/config" + "github.com/miniflux/miniflux2/reader/feed" + "github.com/miniflux/miniflux2/storage" + "log" + "net/http" + "time" +) + +func NewServer(cfg *config.Config, store *storage.Storage, feedHandler *feed.Handler) *http.Server { + server := &http.Server{ + ReadTimeout: 5 * time.Second, + WriteTimeout: 10 * time.Second, + IdleTimeout: 60 * time.Second, + Addr: cfg.Get("LISTEN_ADDR", "127.0.0.1:8080"), + Handler: getRoutes(store, feedHandler), + } + + go func() { + log.Printf("Listening on %s\n", server.Addr) + if err := server.ListenAndServe(); err != nil { + log.Fatal(err) + } + }() + + return server +} diff --git a/server/static/bin.go b/server/static/bin.go new file mode 100644 index 00000000..11883294 --- /dev/null +++ b/server/static/bin.go @@ -0,0 +1,12 @@ +// Code generated by go generate; DO NOT EDIT. 
+// 2017-11-19 22:01:21.922229748 -0800 PST m=+0.003062891 + +package static + +var Binaries = map[string]string{ + "favicon.ico": `AAABAAEAQEAAAAEAIAAoQgAAFgAAACgAAABAAAAAgAAAAAEAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP/
//wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAADoAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAFf///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAAAYAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAADf///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAADYAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAGf///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAN7///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAN7///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP
8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAN7///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAN7///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAN7///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAYQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADe////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADZAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAGEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA3v///wD//
/8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA2QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAXf///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAABhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAOX///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGv///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAaAAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADz////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADVAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGb///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAB/////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAHYAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/gAAAAn///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAzQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABm////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAApv///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAACDAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAy////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAANMAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZv///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAANf///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAAkAAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAZ////wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAF////8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAKf///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAJ8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAALf///8A////AP///wD///8A////AP///wD///8A////AP///wAAAAACAAAA/QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABR////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAJP///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADVAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD7AAAAI////wD///8A////AP///wD///8A////AP///wD///8AAAAAJwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAQ////wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD4AAAAJ////wD///8A////AP///wD///8A////AP///wAAAAAdAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAKr///8A////AP///wD///8A////AP///wD///8A////AAAAAIQAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAC7///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAANoAAAAa////AP///wD///8A////AP///wD///8AAAAAgQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAfP///wD///8A////AP///wD///8A////AAAAABoAAADlAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPcAAAAF////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA6wAAAGUAAAAN////AP///wAAAAAdAAAAhwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACyAAAAOf///wD///8AAAAACQAAAEAAAADgAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAADI////AP///wD///8A////AP///wD///8A
////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/QAAAOYAAADtAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD0AAAA4AAAAPwAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAiP///wD///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/gAAACX///8A////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA8AAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD+AAAAUQAAAOEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAALz///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAGcAAADYAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAmP///wAAAAAxAAAA9gAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPoAAAAu////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAABWAAAAIwAAAOsAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA4gAAAA////8A////AAAAAFoAAAD9AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAB9////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAVv///wAAAAAxAAAA3QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA0QAAAB3///8A////AP///wD///8AAAAARgAAAOsAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACQAAAAAf///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAADpAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAFb///8A////AAAAAA8AAACsAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAswAAAA3///8A////AP///wD///8A////AP///wAAAAAbAAAAwgAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPMAAABy////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8AAAAArwAAAMAAAADAAAAAwAAAAMAAAADAAAAAwAAAAMAAAADAAAAAwAAAAMAAAABA////AP///wD///8A////AAAAAEUAAACrAAAA+gAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPsAAACpAAAAOf///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAAA9AAAApwAAAPoAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA5wAAAJEAAAAW////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AAAAABcAAABLAAAAagAAAIkAAACGAAAAZQAAAEQAAAAY////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wAAAAAUAAAAQgAAAGMAAACFAAAAjwAAAHsAAABnAAAAMwAAAAH///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A///
/AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP
///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A////AP///wD///8A//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAf/gA/+AD/AB/+AD/4AP8AH/4AP/gA/wAP/AA/+AD/AA/8AD/4AP8AD/wAH/gA/wAH/AAf+AD/AAf8AA/wAP8AA/gAD/AA/wAB8AAD4AD/AAAAAAAAAP8AAAAAAAAB/wAAAACAAAH/ABAAAMAAA/8AGAAB4AAH/wAcAAPwAAf/AB4AB/gAH/8AH4Af/gA/////+f//5///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8=`, +} + +var BinariesChecksums = map[string]string{ + "favicon.ico": "abb2a2675b0696252719f51dbfc1efc50affb2f17ec82166e27f9529eec896fb", +} diff --git a/server/static/bin/favicon.ico b/server/static/bin/favicon.ico new file mode 100644 index 00000000..77af6f92 Binary files /dev/null and b/server/static/bin/favicon.ico differ diff --git a/server/static/css.go b/server/static/css.go new file mode 100644 index 00000000..c53dee0f --- /dev/null +++ b/server/static/css.go @@ -0,0 +1,14 @@ +// Code generated by go generate; DO NOT EDIT. 
+// 2017-11-19 22:01:21.922613988 -0800 PST m=+0.003447131 + +package static + +var Stylesheets = map[string]string{ + "black": `body{background:#222;color:#efefef}h1,h2,h3{color:#aaa}a{color:#aaa}a:focus,a:hover{color:#ddd}.header li{border-color:#333}.header a{color:#ddd;font-weight:400}.header .active a{font-weight:400;color:#9b9494}.header a:focus,.header a:hover{color:rgba(82,168,236,.85)}.page-header h1{border-color:#333}.logo a:hover span{color:#555}table,th,td{border:1px solid #555}th{background:#333;color:#aaa;font-weight:400}tr:hover{background-color:#333;color:#aaa}input[type=url],input[type=password],input[type=text]{border:1px solid #555;background:#333;color:#ccc}input[type=url]:focus,input[type=password]:focus,input[type=text]:focus{color:#efefef;border-color:rgba(82,168,236,.8);box-shadow:0 0 8px rgba(82,168,236,.6)}.button-primary{border-color:#444;background:#333;color:#efefef}.button-primary:hover,.button-primary:focus{border-color:#888;background:#555}.alert,.alert-success,.alert-error,.alert-info,.alert-normal{color:#efefef;background-color:#333;border-color:#444}.panel{background:#333;border-color:#555}.unread-counter{color:#bbb}.category{color:#efefef;background-color:#333;border-color:#444}.category a{color:#999}.category a:hover,.category a:focus{color:#aaa}.pagination a{color:#aaa}.pagination-bottom{border-color:#333}.item{border-color:#666;padding:4px}.item.current-item{border-width:2px;border-color:rgba(82,168,236,.8);box-shadow:0 0 8px rgba(82,168,236,.6)}.item-title a{font-weight:400}.item-status-read .item-title a{color:#666}.item-status-read .item-title a:focus,.item-status-read .item-title a:hover{color:rgba(82,168,236,.6)}.item-meta a:hover,.item-meta a:focus{color:#aaa}.item-meta li:after{color:#ddd}.entry header{border-color:#333}.entry header h1 a{color:#bbb}.entry-content,.entry-content p,ul{color:#999}.entry-content pre,.entry-content code{color:#fff;background:#555;border-color:#888}.entry-enclosure{border-color:#333}`, + "common": `*{margin:0;padding:0;box-sizing:border-box}body{font-family:helvetica neue,Helvetica,Arial,sans-serif;text-rendering:optimizeLegibility}.main{padding-left:3px;padding-right:3px}a{color:#36c}a:focus{outline:0;color:red;text-decoration:none;border:1px dotted #aaa}a:hover{color:#333;text-decoration:none}.header{margin-top:10px;margin-bottom:20px}.header nav ul{display:none}.header li{cursor:pointer;padding-left:10px;line-height:2.1em;font-size:1.2em;border-bottom:1px dotted #ddd}.header li:hover a{color:#888}.header a{font-size:.9em;color:#444;text-decoration:none;border:0}.header .active a{font-weight:600}.header a:hover,.header a:focus{color:#888}.page-header{margin-bottom:25px}.page-header h1{font-weight:500;border-bottom:1px dotted #ddd}.page-header ul{margin-left:25px;font-size:.9em}.page-header li{list-style-type:circle;line-height:1.4em}.logo{cursor:pointer;text-align:center}.logo a{color:#000;letter-spacing:1px}.logo a:hover{color:#396}.logo a span{color:#396}.logo a:hover span{color:#000}@media(min-width:600px){body{margin:auto;max-width:750px}.logo{text-align:left;float:left;margin-right:15px}.header nav ul{display:block}.header li{display:inline;padding:0;padding-right:15px;line-height:normal;font-size:1em;border:0}.page-header ul{margin-left:0}.page-header li{display:inline;padding-right:15px}}table{width:100%;border-collapse:collapse}table,th,td{border:1px solid #ddd}th,td{padding:5px;text-align:left}td{vertical-align:top}th{background:#fcfcfc}.table-overflow 
td{max-width:0;text-overflow:ellipsis;white-space:nowrap;overflow:hidden}tr:hover{background-color:#f9f9f9}.column-40{width:40%}.column-25{width:25%}.column-20{width:20%}label{cursor:pointer;display:block}.radio-group{line-height:1.9em}div.radio-group label{display:inline-block}select{margin-bottom:15px}input[type=url],input[type=password],input[type=text]{border:1px solid #ccc;padding:3px;line-height:15px;width:250px;font-size:99%;margin-bottom:10px;margin-top:5px;-webkit-appearance:none}input[type=url]:focus,input[type=password]:focus,input[type=text]:focus{color:#000;border-color:rgba(82,168,236,.8);outline:0;box-shadow:0 0 8px rgba(82,168,236,.6)}::-moz-placeholder,::-ms-input-placeholder,::-webkit-input-placeholder{color:#ddd;padding-top:2px}.form-help{font-size:.9em;color:brown;margin-bottom:15px}a.button{text-decoration:none}.button{display:inline-block;-webkit-appearance:none;-moz-appearance:none;font-size:1.1em;cursor:pointer;padding:3px 10px;border:1px solid;border-radius:unset}.button-primary{border-color:#3079ed;background:#4d90fe;color:#fff}.button-primary:hover,.button-primary:focus{border-color:#2f5bb7;background:#357ae8}.button-danger{border-color:#b0281a;background:#d14836;color:#fff}.button-danger:hover,.button-danger:focus{color:#fff;background:#c53727}.button:disabled{color:#ccc;background:#f7f7f7;border-color:#ccc}.buttons{margin-top:10px;margin-bottom:20px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;color:#c09853;background-color:#fcf8e3;border:1px solid #fbeed5;border-radius:4px;overflow:auto}.alert h3{margin-top:0;margin-bottom:15px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-error a{color:#b94a48}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.panel{color:#333;background-color:#f0f0f0;border:1px solid #ddd;border-radius:5px;padding:10px;margin-bottom:15px}.panel h3{font-weight:500;margin-top:0;margin-bottom:20px}.panel ul{margin-left:30px}.login-form{margin:auto;margin-top:50px;width:350px}.unread-counter{font-size:.8em;font-weight:300;color:#666}.category{font-size:.75em;background-color:#fffcd7;border:1px solid #d5d458;border-radius:5px;margin-left:.25em;padding:1px .4em;white-space:nowrap}.category a{color:#555;text-decoration:none}.category a:hover,.category a:focus{color:#000}.pagination{font-size:1.1em;display:flex;align-items:center;padding-top:8px}.pagination-bottom{border-top:1px dotted #ddd;margin-bottom:15px;margin-top:50px}.pagination>div{flex:1}.pagination-next{text-align:right}.pagination-prev:before{content:"« "}.pagination-next:after{content:" »"}.pagination a{color:#333}.pagination a:hover,.pagination a:focus{text-decoration:none}.item{border:1px dotted #ddd;margin-bottom:20px;padding:5px;overflow:hidden}.item.current-item{border:3px solid #bce;padding:3px}.item-title a{text-decoration:none;font-weight:600}.item-status-read .item-title a{color:#777}.item-meta{color:#777;font-size:.8em}.item-meta a{color:#777;text-decoration:none}.item-meta a:hover,.item-meta a:focus{color:#333}.item-meta ul{margin-top:5px}.item-meta li{display:inline}.item-meta li:after{content:"|";color:#aaa}.item-meta li:last-child:after{content:""}.hide-read-items .item-status-read{display:none}.entry header{padding-bottom:5px;border-bottom:1px dotted #ddd}.entry header h1{font-size:2em;line-height:1.25em;margin:30px 0}.entry header h1 a{text-decoration:none;color:#333}.entry header h1 a:hover,.entry header h1 
a:focus{color:#666}.entry-meta{font-size:.95em;margin:0 0 20px;color:#666}.entry-website img{vertical-align:top}.entry-website a{color:#666;vertical-align:top;text-decoration:none}.entry-website a:hover,.entry-website a:focus{text-decoration:underline}.entry-date{font-size:.65em;font-style:italic;color:#555}.entry-content{padding-top:15px;font-size:1.1em;font-weight:300;color:#444}.entry-content h1,h2,h3,h4,h5,h6{margin-top:15px}.entry-content iframe,.entry-content video,.entry-content img{max-width:100%}.entry-content figure img{border:1px solid #000}.entry-content figcaption{font-size:.75em;text-transform:uppercase;color:#777}.entry-content p{margin-top:15px;margin-bottom:15px;text-align:justify}.entry-content a:visited{color:purple}.entry-content dt{font-weight:500;margin-top:15px;color:#555}.entry-content dd{margin-left:15px;margin-top:5px;padding-left:20px;border-left:3px solid #ddd;color:#777;font-weight:300;line-height:1.4em}.entry-content blockquote{border-left:4px solid #ddd;padding-left:25px;margin-left:20px;margin-top:20px;margin-bottom:20px;color:#888;line-height:1.4em;font-family:Georgia,serif}.entry-content blockquote+p{color:#555;font-style:italic;font-weight:200}.entry-content q{color:purple;font-family:Georgia,serif;font-style:italic}.entry-content q:before{content:"“"}.entry-content q:after{content:"”"}.entry-content pre{padding:5px;background:#f0f0f0;border:1px solid #ddd;overflow:scroll}.entry-content ul,.entry-content ol{margin-left:30px}.entry-content ul{list-style-type:square}.entry-enclosures h3{font-weight:500}.entry-enclosure{border:1px dotted #ddd;padding:5px;margin-top:10px;max-width:100%}.entry-enclosure-download{font-size:.85em}.enclosure-video video,.enclosure-image img{max-width:100%}`, +} + +var StylesheetsChecksums = map[string]string{ + "black": "38e7fee92187a036ce37f3c15fde2deff59a55c5ab693c7b8578af79d6a117d2", + "common": "0f4de90d16570a37392ff64dd85b336372477afee298c47b6a3d98d3fb4bd4b3", +} diff --git a/server/static/css/black.css b/server/static/css/black.css new file mode 100644 index 00000000..793e51fb --- /dev/null +++ b/server/static/css/black.css @@ -0,0 +1,197 @@ +/* Layout */ +body { + background: #222; + color: #efefef; +} + +h1, h2, h3 { + color: #aaa; +} + +a { + color: #aaa; +} + +a:focus, +a:hover { + color: #ddd; +} + +.header li { + border-color: #333; +} + +.header a { + color: #ddd; + font-weight: 400; +} + +.header .active a { + font-weight: 400; + color: #9b9494; +} + +.header a:focus, +.header a:hover { + color: rgba(82, 168, 236, 0.85); +} + +.page-header h1 { + border-color: #333; +} + +.logo a:hover span { + color: #555; +} + +/* Tables */ +table, th, td { + border: 1px solid #555; +} + +th { + background: #333; + color: #aaa; + font-weight: 400; +} + +tr:hover { + background-color: #333; + color: #aaa; +} + +/* Forms */ +input[type="url"], +input[type="password"], +input[type="text"] { + border: 1px solid #555; + background: #333; + color: #ccc; +} + +input[type="url"]:focus, +input[type="password"]:focus, +input[type="text"]:focus { + color: #efefef; + border-color: rgba(82, 168, 236, 0.8); + box-shadow: 0 0 8px rgba(82, 168, 236, 0.6); +} + +/* Buttons */ +.button-primary { + border-color: #444; + background: #333; + color: #efefef; +} + +.button-primary:hover, +.button-primary:focus { + border-color: #888; + background: #555; +} + +/* Alerts */ +.alert, +.alert-success, +.alert-error, +.alert-info, +.alert-normal { + color: #efefef; + background-color: #333; + border-color: #444; +} + +/* Panel */ +.panel { + background: 
#333; + border-color: #555; +} + +/* Counter */ +.unread-counter { + color: #bbb; +} + +/* Category label */ +.category { + color: #efefef; + background-color: #333; + border-color: #444; +} + +.category a { + color: #999; +} + +.category a:hover, +.category a:focus { + color: #aaa; +} + +/* Pagination */ +.pagination a { + color: #aaa; +} + +.pagination-bottom { + border-color: #333; +} + +/* List view */ +.item { + border-color: #666; + padding: 4px; +} + +.item.current-item { + border-width: 2px; + border-color: rgba(82, 168, 236, 0.8); + box-shadow: 0 0 8px rgba(82, 168, 236, 0.6); +} + +.item-title a { + font-weight: 400; +} + +.item-status-read .item-title a { + color: #666; +} + +.item-status-read .item-title a:focus, +.item-status-read .item-title a:hover { + color: rgba(82, 168, 236, 0.6); +} + +.item-meta a:hover, +.item-meta a:focus { + color: #aaa; +} + +.item-meta li:after { + color: #ddd; +} + +/* Entry view */ +.entry header { + border-color: #333; +} + +.entry header h1 a { + color: #bbb; +} + +.entry-content, +.entry-content p, ul { + color: #999; +} + +.entry-content pre, +.entry-content code { + color: #fff; + background: #555; + border-color: #888; +} + +.entry-enclosure { + border-color: #333; +} diff --git a/server/static/css/common.css b/server/static/css/common.css new file mode 100644 index 00000000..bfbc43eb --- /dev/null +++ b/server/static/css/common.css @@ -0,0 +1,654 @@ +/* Layout */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +body { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + text-rendering: optimizeLegibility; +} + +.main { + padding-left: 3px; + padding-right: 3px; +} + +a { + color: #3366CC; +} + +a:focus { + outline: 0; + color: red; + text-decoration: none; + border: 1px dotted #aaa; +} + +a:hover { + color: #333; + text-decoration: none; +} + +.header { + margin-top: 10px; + margin-bottom: 20px; +} + +.header nav ul { + display: none; +} + +.header li { + cursor: pointer; + padding-left: 10px; + line-height: 2.1em; + font-size: 1.2em; + border-bottom: 1px dotted #ddd; +} + +.header li:hover a { + color: #888; +} + +.header a { + font-size: 0.9em; + color: #444; + text-decoration: none; + border: none; +} + +.header .active a { + font-weight: 600; +} + +.header a:hover, +.header a:focus { + color: #888; +} + +.page-header { + margin-bottom: 25px; +} + +.page-header h1 { + font-weight: 500; + border-bottom: 1px dotted #ddd; +} + +.page-header ul { + margin-left: 25px; + font-size: 0.9em; +} + +.page-header li { + list-style-type: circle; + line-height: 1.4em; +} + +.logo { + cursor: pointer; + text-align: center; +} + +.logo a { + color: #000; + letter-spacing: 1px; +} + +.logo a:hover { + color: #339966; +} + +.logo a span { + color: #339966; +} + +.logo a:hover span { + color: #000; +} + +@media (min-width: 600px) { + body { + margin: auto; + max-width: 750px; + } + + .logo { + text-align: left; + float: left; + margin-right: 15px; + } + + .header nav ul { + display: block; + } + + .header li { + display: inline; + padding: 0; + padding-right: 15px; + line-height: normal; + font-size: 1.0em; + border: none; + } + + .page-header ul { + margin-left: 0; + } + + .page-header li { + display: inline; + padding-right: 15px; + } +} + +/* Tables */ +table { + width: 100%; + border-collapse: collapse; +} + +table, th, td { + border: 1px solid #ddd; +} + +th, td { + padding: 5px; + text-align: left; +} + +td { + vertical-align: top; +} + +th { + background: #fcfcfc; +} + +.table-overflow td { + max-width: 0; + 
text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; +} + +tr:hover { + background-color: #f9f9f9; +} + +.column-40 { + width: 40%; +} + +.column-25 { + width: 25%; +} + +.column-20 { + width: 20%; +} + +/* Forms */ +label { + cursor: pointer; + display: block; +} + +.radio-group { + line-height: 1.9em; +} + +div.radio-group label { + display: inline-block; +} + +select { + margin-bottom: 15px; +} + +input[type="url"], +input[type="password"], +input[type="text"] { + border: 1px solid #ccc; + padding: 3px; + line-height: 15px; + width: 250px; + font-size: 99%; + margin-bottom: 10px; + margin-top: 5px; + -webkit-appearance: none; +} + +input[type="url"]:focus, +input[type="password"]:focus, +input[type="text"]:focus { + color: #000; + border-color: rgba(82, 168, 236, 0.8); + outline: 0; + box-shadow: 0 0 8px rgba(82, 168, 236, 0.6); +} + +::-moz-placeholder, +::-ms-input-placeholder, +::-webkit-input-placeholder { + color: #ddd; + padding-top: 2px; +} + +.form-help { + font-size: 0.9em; + color: brown; + margin-bottom: 15px; +} + +/* Buttons */ +a.button { + text-decoration: none; +} + +.button { + display: inline-block; + -webkit-appearance: none; + -moz-appearance: none; + font-size: 1.1em; + cursor: pointer; + padding: 3px 10px; + border: 1px solid; + border-radius: unset; +} + +.button-primary { + border-color: #3079ed; + background: #4d90fe; + color: #fff; +} + +.button-primary:hover, +.button-primary:focus { + border-color: #2f5bb7; + background: #357ae8; +} + +.button-danger { + border-color: #b0281a; + background: #d14836; + color: #fff; +} + +.button-danger:hover, +.button-danger:focus { + color: #fff; + background: #c53727; +} + +.button:disabled { + color: #ccc; + background: #f7f7f7; + border-color: #ccc; +} + +.buttons { + margin-top: 10px; + margin-bottom: 20px; +} + +/* Alerts */ +.alert { + padding: 8px 35px 8px 14px; + margin-bottom: 20px; + color: #c09853; + background-color: #fcf8e3; + border: 1px solid #fbeed5; + border-radius: 4px; + overflow: auto; +} + +.alert h3 { + margin-top: 0; + margin-bottom: 15px; +} + +.alert-success { + color: #468847; + background-color: #dff0d8; + border-color: #d6e9c6; +} + +.alert-error { + color: #b94a48; + background-color: #f2dede; + border-color: #eed3d7; +} + +.alert-error a { + color: #b94a48; +} + +.alert-info { + color: #3a87ad; + background-color: #d9edf7; + border-color: #bce8f1; +} + +/* Panel */ +.panel { + color: #333; + background-color: #f0f0f0; + border: 1px solid #ddd; + border-radius: 5px; + padding: 10px; + margin-bottom: 15px; +} + +.panel h3 { + font-weight: 500; + margin-top: 0; + margin-bottom: 20px; +} + +.panel ul { + margin-left: 30px; +} + +/* Login form */ +.login-form { + margin: auto; + margin-top: 50px; + width: 350px; +} + +/* Counter */ +.unread-counter { + font-size: 0.8em; + font-weight: 300; + color: #666; +} + +/* Category label */ +.category { + font-size: 0.75em; + background-color: #fffcd7; + border: 1px solid #d5d458; + border-radius: 5px; + margin-left: 0.25em; + padding: 1px 0.4em 1px 0.4em; + white-space: nowrap; +} + +.category a { + color: #555; + text-decoration: none; +} + +.category a:hover, +.category a:focus { + color: #000; +} + +/* Pagination */ +.pagination { + font-size: 1.1em; + display: flex; + align-items: center; + padding-top: 8px; +} + +.pagination-bottom { + border-top: 1px dotted #ddd; + margin-bottom: 15px; + margin-top: 50px; +} + +.pagination > div { + flex: 1; +} + +.pagination-next { + text-align: right; +} + +.pagination-prev:before { + content: "« "; +} + 
+.pagination-next:after { + content: " »"; +} + +.pagination a { + color: #333; +} + +.pagination a:hover, +.pagination a:focus { + text-decoration: none; +} + +/* List view */ +.item { + border: 1px dotted #ddd; + margin-bottom: 20px; + padding: 5px; + overflow: hidden; +} + +.item.current-item { + border: 3px solid #bce; + padding: 3px; +} + +.item-title a { + text-decoration: none; + font-weight: 600; +} + +.item-status-read .item-title a { + color: #777; +} + +.item-meta { + color: #777; + font-size: 0.8em; +} + +.item-meta a { + color: #777; + text-decoration: none; +} + +.item-meta a:hover, +.item-meta a:focus { + color: #333; +} + +.item-meta ul { + margin-top: 5px; +} + +.item-meta li { + display: inline; +} + +.item-meta li:after { + content: "|"; + color: #aaa; +} + +.item-meta li:last-child:after { + content: ""; +} + +.hide-read-items .item-status-read { + display: none; +} + +/* Entry view */ +.entry header { + padding-bottom: 5px; + border-bottom: 1px dotted #ddd; +} + +.entry header h1 { + font-size: 2.0em; + line-height: 1.25em; + margin: 30px 0; +} + +.entry header h1 a { + text-decoration: none; + color: #333; +} + +.entry header h1 a:hover, +.entry header h1 a:focus { + color: #666; +} + +.entry-meta { + font-size: 0.95em; + margin: 0 0 20px; + color: #666; +} + +.entry-website img { + vertical-align: top; +} + +.entry-website a { + color: #666; + vertical-align: top; + text-decoration: none; +} + +.entry-website a:hover, +.entry-website a:focus { + text-decoration: underline; +} + +.entry-date { + font-size: 0.65em; + font-style: italic; + color: #555; +} + +.entry-content { + padding-top: 15px; + font-size: 1.1em; + font-weight: 300; + color: #444; +} + +.entry-content h1, h2, h3, h4, h5, h6 { + margin-top: 15px; +} + +.entry-content iframe, +.entry-content video, +.entry-content img { + max-width: 100%; +} + +.entry-content figure img { + border: 1px solid #000; +} + +.entry-content figcaption { + font-size: 0.75em; + text-transform: uppercase; + color: #777; +} + +.entry-content p { + margin-top: 15px; + margin-bottom: 15px; + text-align: justify; +} + +.entry-content a:visited { + color: purple; +} + +.entry-content dt { + font-weight: 500; + margin-top: 15px; + color: #555; +} + +.entry-content dd { + margin-left: 15px; + margin-top: 5px; + padding-left: 20px; + border-left: 3px solid #ddd; + color: #777; + font-weight: 300; + line-height: 1.4em; +} + +.entry-content blockquote { + border-left: 4px solid #ddd; + padding-left: 25px; + margin-left: 20px; + margin-top: 20px; + margin-bottom: 20px; + color: #888; + line-height: 1.4em; + font-family: Georgia, serif; +} + +.entry-content blockquote + p { + color: #555; + font-style: italic; + font-weight: 200; +} + +.entry-content q { + color: purple; + font-family: Georgia, serif; + font-style: italic; +} + +.entry-content q:before { + content: "“"; +} + +.entry-content q:after { + content: "”"; +} + +.entry-content pre { + padding: 5px; + background: #f0f0f0; + border: 1px solid #ddd; + overflow: scroll; +} + +.entry-content ul, +.entry-content ol { + margin-left: 30px; +} + +.entry-content ul { + list-style-type: square; +} + +.entry-enclosures h3 { + font-weight: 500; +} + +.entry-enclosure { + border: 1px dotted #ddd; + padding: 5px; + margin-top: 10px; + max-width: 100%; +} + +.entry-enclosure-download { + font-size: 0.85em; +} + +.enclosure-video video, +.enclosure-image img { + max-width: 100%; +} diff --git a/server/static/js.go b/server/static/js.go new file mode 100644 index 00000000..641aa2ca --- /dev/null 
+++ b/server/static/js.go @@ -0,0 +1,52 @@ +// Code generated by go generate; DO NOT EDIT. +// 2017-11-19 22:01:21.923282889 -0800 PST m=+0.004116032 + +package static + +var Javascript = map[string]string{ + "app": `(function(){'use strict';class KeyboardHandler{constructor(){this.queue=[];this.shortcuts={};} +on(combination,callback){this.shortcuts[combination]=callback;} +listen(){document.onkeydown=(event)=>{if(this.isEventIgnored(event)){return;} +let key=this.getKey(event);this.queue.push(key);for(let combination in this.shortcuts){let keys=combination.split(" ");if(keys.every((value,index)=>value===this.queue[index])){this.queue=[];this.shortcuts[combination]();return;} +if(keys.length===1&&key===keys[0]){this.queue=[];this.shortcuts[combination]();return;}} +if(this.queue.length>=2){this.queue=[];}};} +isEventIgnored(event){return event.target.tagName==="INPUT"||event.target.tagName==="TEXTAREA";} +getKey(event){const mapping={'Esc':'Escape','Up':'ArrowUp','Down':'ArrowDown','Left':'ArrowLeft','Right':'ArrowRight'};for(let key in mapping){if(mapping.hasOwnProperty(key)&&key===event.key){return mapping[key];}} +return event.key;}} +class FormHandler{static handleSubmitButtons(){let elements=document.querySelectorAll("form");elements.forEach(function(element){element.onsubmit=function(){let button=document.querySelector("button");if(button){button.innerHTML=button.dataset.labelLoading;button.disabled=true;}};});}} +class MouseHandler{onClick(selector,callback){let elements=document.querySelectorAll(selector);elements.forEach((element)=>{element.onclick=(event)=>{event.preventDefault();callback(event);};});}} +class App{run(){FormHandler.handleSubmitButtons();let keyboardHandler=new KeyboardHandler();keyboardHandler.on("g u",()=>this.goToPage("unread"));keyboardHandler.on("g h",()=>this.goToPage("history"));keyboardHandler.on("g f",()=>this.goToPage("feeds"));keyboardHandler.on("g c",()=>this.goToPage("categories"));keyboardHandler.on("g s",()=>this.goToPage("settings"));keyboardHandler.on("ArrowLeft",()=>this.goToPrevious());keyboardHandler.on("ArrowRight",()=>this.goToNext());keyboardHandler.on("j",()=>this.goToPrevious());keyboardHandler.on("p",()=>this.goToPrevious());keyboardHandler.on("k",()=>this.goToNext());keyboardHandler.on("n",()=>this.goToNext());keyboardHandler.on("h",()=>this.goToPage("previous"));keyboardHandler.on("l",()=>this.goToPage("next"));keyboardHandler.on("o",()=>this.openSelectedItem());keyboardHandler.on("v",()=>this.openOriginalLink());keyboardHandler.on("m",()=>this.toggleEntryStatus());keyboardHandler.on("A",()=>this.markPageAsRead());keyboardHandler.listen();let mouseHandler=new MouseHandler();mouseHandler.onClick("a[data-on-click=markPageAsRead]",()=>this.markPageAsRead());if(document.documentElement.clientWidth<600){mouseHandler.onClick(".logo",()=>this.toggleMainMenu());mouseHandler.onClick(".header nav li",(event)=>this.clickMenuListItem(event));}} +clickMenuListItem(event){let element=event.target;console.log(element);if(element.tagName==="A"){window.location.href=element.getAttribute("href");}else{window.location.href=element.querySelector("a").getAttribute("href");}} +toggleMainMenu(){let menu=document.querySelector(".header nav ul");if(this.isVisible(menu)){menu.style.display="none";}else{menu.style.display="block";}} +updateEntriesStatus(entryIDs,status){let url=document.body.dataset.entriesStatusUrl;let request=new 
Request(url,{method:"POST",cache:"no-cache",credentials:"include",body:JSON.stringify({entry_ids:entryIDs,status:status}),headers:new Headers({"Content-Type":"application/json","X-Csrf-Token":this.getCsrfToken()})});fetch(request);} +markPageAsRead(){let items=this.getVisibleElements(".items .item");let entryIDs=[];items.forEach((element)=>{element.classList.add("item-status-read");entryIDs.push(parseInt(element.dataset.id,10));});if(entryIDs.length>0){this.updateEntriesStatus(entryIDs,"read");} +this.goToPage("next");} +toggleEntryStatus(){let currentItem=document.querySelector(".current-item");if(currentItem!==null){let entryID=parseInt(currentItem.dataset.id,10);let statuses={read:"unread",unread:"read"};for(let currentStatus in statuses){let newStatus=statuses[currentStatus];if(currentItem.classList.contains("item-status-"+currentStatus)){this.goToNextListItem();currentItem.classList.remove("item-status-"+currentStatus);currentItem.classList.add("item-status-"+newStatus);this.updateEntriesStatus([entryID],newStatus);break;}}}} +openOriginalLink(){let entryLink=document.querySelector(".entry h1 a");if(entryLink!==null){this.openNewTab(entryLink.getAttribute("href"));return;} +let currentItemOriginalLink=document.querySelector(".current-item a[data-original-link]");if(currentItemOriginalLink!==null){this.openNewTab(currentItemOriginalLink.getAttribute("href"));}} +openSelectedItem(){let currentItemLink=document.querySelector(".current-item .item-title a");if(currentItemLink!==null){window.location.href=currentItemLink.getAttribute("href");}} +goToPage(page){let element=document.querySelector("a[data-page="+page+"]");if(element){document.location.href=element.href;}} +goToPrevious(){if(this.isListView()){this.goToPreviousListItem();}else{this.goToPage("previous");}} +goToNext(){if(this.isListView()){this.goToNextListItem();}else{this.goToPage("next");}} +goToPreviousListItem(){let items=this.getVisibleElements(".items .item");if(items.length===0){return;} +if(document.querySelector(".current-item")===null){items[0].classList.add("current-item");return;} +for(let i=0;i<items.length;i++){if(items[i].classList.contains("current-item")){items[i].classList.remove("current-item");if(i-1>=0){items[i-1].classList.add("current-item");this.scrollPageTo(items[i-1]);} +break;}}} +goToNextListItem(){let items=this.getVisibleElements(".items .item");if(items.length===0){return;} +if(document.querySelector(".current-item")===null){items[0].classList.add("current-item");return;} +for(let i=0;i<items.length;i++){if(items[i].classList.contains("current-item")){items[i].classList.remove("current-item");if(i+1<items.length){items[i+1].classList.add("current-item");this.scrollPageTo(items[i+1]);} +break;}}} +getVisibleElements(selector){let elements=document.querySelectorAll(selector);let result=[];for(let i=0;i<elements.length;i++){if(this.isVisible(elements[i])){result.push(elements[i]);}} +return result;} +isListView(){return document.querySelector(".items")!==null;} +scrollPageTo(item){let windowScrollPosition=window.pageYOffset;let windowHeight=document.documentElement.clientHeight;let viewportPosition=windowScrollPosition+windowHeight;let itemBottomPosition=item.offsetTop+item.offsetHeight;if(viewportPosition-itemBottomPosition<0||viewportPosition-item.offsetTop>windowHeight){window.scrollTo(0,item.offsetTop-10);}} +openNewTab(url){let win=window.open(url,"_blank");win.focus();} +isVisible(element){return element.offsetParent!==null;} +getCsrfToken(){let element=document.querySelector("meta[name=X-CSRF-Token]");if(element!==null){return element.getAttribute("value");} +return "";}} +document.addEventListener("DOMContentLoaded",function(){(new App()).run();});})();`, +} + +var JavascriptChecksums = map[string]string{ + "app": "e250c2af19dea14fd75681a81080cf183919a7a589b0886a093586ee894c8282", +} diff --git a/server/static/js/app.js b/server/static/js/app.js new file mode 100644 index 00000000..46a8f720 --- /dev/null +++ b/server/static/js/app.js @@ -0,0 +1,351 @@ +/*jshint esversion: 6 */ +(function() { +'use strict'; + +class KeyboardHandler { + constructor() { + this.queue = []; + this.shortcuts = {}; + } + + on(combination, callback) { + this.shortcuts[combination] = callback; + } + + listen() { + document.onkeydown = (event) => { + if (this.isEventIgnored(event)) { + return; + } + + let key = this.getKey(event); + this.queue.push(key); + + for (let combination in this.shortcuts) {
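+                // Each registered shortcut is compared against the queue of recently
+                // pressed keys: a chord such as "g u" fires once both keys have been
+                // typed in order, while a single-key shortcut fires on a direct match.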
let keys = combination.split(" "); + + if (keys.every((value, index) => value === this.queue[index])) { + this.queue = []; + this.shortcuts[combination](); + return; + } + + if (keys.length === 1 && key === keys[0]) { + this.queue = []; + this.shortcuts[combination](); + return; + } + } + + if (this.queue.length >= 2) { + this.queue = []; + } + }; + } + + isEventIgnored(event) { + return event.target.tagName === "INPUT" || event.target.tagName === "TEXTAREA"; + } + + getKey(event) { + const mapping = { + 'Esc': 'Escape', + 'Up': 'ArrowUp', + 'Down': 'ArrowDown', + 'Left': 'ArrowLeft', + 'Right': 'ArrowRight' + }; + + for (let key in mapping) { + if (mapping.hasOwnProperty(key) && key === event.key) { + return mapping[key]; + } + } + + return event.key; + } +} + +class FormHandler { + static handleSubmitButtons() { + let elements = document.querySelectorAll("form"); + elements.forEach(function (element) { + element.onsubmit = function () { + let button = document.querySelector("button"); + + if (button) { + button.innerHTML = button.dataset.labelLoading; + button.disabled = true; + } + }; + }); + } +} + +class MouseHandler { + onClick(selector, callback) { + let elements = document.querySelectorAll(selector); + elements.forEach((element) => { + element.onclick = (event) => { + event.preventDefault(); + callback(event); + }; + }); + } +} + +class App { + run() { + FormHandler.handleSubmitButtons(); + + let keyboardHandler = new KeyboardHandler(); + keyboardHandler.on("g u", () => this.goToPage("unread")); + keyboardHandler.on("g h", () => this.goToPage("history")); + keyboardHandler.on("g f", () => this.goToPage("feeds")); + keyboardHandler.on("g c", () => this.goToPage("categories")); + keyboardHandler.on("g s", () => this.goToPage("settings")); + keyboardHandler.on("ArrowLeft", () => this.goToPrevious()); + keyboardHandler.on("ArrowRight", () => this.goToNext()); + keyboardHandler.on("j", () => this.goToPrevious()); + keyboardHandler.on("p", () => this.goToPrevious()); + keyboardHandler.on("k", () => this.goToNext()); + keyboardHandler.on("n", () => this.goToNext()); + keyboardHandler.on("h", () => this.goToPage("previous")); + keyboardHandler.on("l", () => this.goToPage("next")); + keyboardHandler.on("o", () => this.openSelectedItem()); + keyboardHandler.on("v", () => this.openOriginalLink()); + keyboardHandler.on("m", () => this.toggleEntryStatus()); + keyboardHandler.on("A", () => this.markPageAsRead()); + keyboardHandler.listen(); + + let mouseHandler = new MouseHandler(); + mouseHandler.onClick("a[data-on-click=markPageAsRead]", () => this.markPageAsRead()); + + if (document.documentElement.clientWidth < 600) { + mouseHandler.onClick(".logo", () => this.toggleMainMenu()); + mouseHandler.onClick(".header nav li", (event) => this.clickMenuListItem(event)); + } + } + + clickMenuListItem(event) { + let element = event.target;console.log(element); + + if (element.tagName === "A") { + window.location.href = element.getAttribute("href"); + } else { + window.location.href = element.querySelector("a").getAttribute("href"); + } + } + + toggleMainMenu() { + let menu = document.querySelector(".header nav ul"); + if (this.isVisible(menu)) { + menu.style.display = "none"; + } else { + menu.style.display = "block"; + } + } + + updateEntriesStatus(entryIDs, status) { + let url = document.body.dataset.entriesStatusUrl; + let request = new Request(url, { + method: "POST", + cache: "no-cache", + credentials: "include", + body: JSON.stringify({entry_ids: entryIDs, status: status}), + headers: new 
Headers({ + "Content-Type": "application/json", + "X-Csrf-Token": this.getCsrfToken() + }) + }); + + fetch(request); + } + + markPageAsRead() { + let items = this.getVisibleElements(".items .item"); + let entryIDs = []; + + items.forEach((element) => { + element.classList.add("item-status-read"); + entryIDs.push(parseInt(element.dataset.id, 10)); + }); + + if (entryIDs.length > 0) { + this.updateEntriesStatus(entryIDs, "read"); + } + + this.goToPage("next"); + } + + toggleEntryStatus() { + let currentItem = document.querySelector(".current-item"); + if (currentItem !== null) { + let entryID = parseInt(currentItem.dataset.id, 10); + let statuses = {read: "unread", unread: "read"}; + + for (let currentStatus in statuses) { + let newStatus = statuses[currentStatus]; + + if (currentItem.classList.contains("item-status-" + currentStatus)) { + this.goToNextListItem(); + + currentItem.classList.remove("item-status-" + currentStatus); + currentItem.classList.add("item-status-" + newStatus); + + this.updateEntriesStatus([entryID], newStatus); + break; + } + } + } + } + + openOriginalLink() { + let entryLink = document.querySelector(".entry h1 a"); + if (entryLink !== null) { + this.openNewTab(entryLink.getAttribute("href")); + return; + } + + let currentItemOriginalLink = document.querySelector(".current-item a[data-original-link]"); + if (currentItemOriginalLink !== null) { + this.openNewTab(currentItemOriginalLink.getAttribute("href")); + } + } + + openSelectedItem() { + let currentItemLink = document.querySelector(".current-item .item-title a"); + if (currentItemLink !== null) { + window.location.href = currentItemLink.getAttribute("href"); + } + } + + goToPage(page) { + let element = document.querySelector("a[data-page=" + page + "]"); + + if (element) { + document.location.href = element.href; + } + } + + goToPrevious() { + if (this.isListView()) { + this.goToPreviousListItem(); + } else { + this.goToPage("previous"); + } + } + + goToNext() { + if (this.isListView()) { + this.goToNextListItem(); + } else { + this.goToPage("next"); + } + } + + goToPreviousListItem() { + let items = this.getVisibleElements(".items .item"); + + if (items.length === 0) { + return; + } + + if (document.querySelector(".current-item") === null) { + items[0].classList.add("current-item"); + return; + } + + for (let i = 0; i < items.length; i++) { + if (items[i].classList.contains("current-item")) { + items[i].classList.remove("current-item"); + + if (i - 1 >= 0) { + items[i - 1].classList.add("current-item"); + this.scrollPageTo(items[i - 1]); + } + + break; + } + } + } + + goToNextListItem() { + let items = this.getVisibleElements(".items .item"); + + if (items.length === 0) { + return; + } + + if (document.querySelector(".current-item") === null) { + items[0].classList.add("current-item"); + return; + } + + for (let i = 0; i < items.length; i++) { + if (items[i].classList.contains("current-item")) { + items[i].classList.remove("current-item"); + + if (i + 1 < items.length) { + items[i + 1].classList.add("current-item"); + this.scrollPageTo(items[i + 1]); + } + + break; + } + } + } + + getVisibleElements(selector) { + let elements = document.querySelectorAll(selector); + let result = []; + + for (let i = 0; i < elements.length; i++) { + if (this.isVisible(elements[i])) { + result.push(elements[i]); + } + } + + return result; + } + + isListView() { + return document.querySelector(".items") !== null; + } + + scrollPageTo(item) { + let windowScrollPosition = window.pageYOffset; + let windowHeight = 
document.documentElement.clientHeight; + let viewportPosition = windowScrollPosition + windowHeight; + let itemBottomPosition = item.offsetTop + item.offsetHeight; + + if (viewportPosition - itemBottomPosition < 0 || viewportPosition - item.offsetTop > windowHeight) { + window.scrollTo(0, item.offsetTop - 10); + } + } + + openNewTab(url) { + let win = window.open(url, "_blank"); + win.focus(); + } + + isVisible(element) { + return element.offsetParent !== null; + } + + getCsrfToken() { + let element = document.querySelector("meta[name=X-CSRF-Token]"); + + if (element !== null) { + return element.getAttribute("value"); + } + + return ""; + } +} + +document.addEventListener("DOMContentLoaded", function() { + (new App()).run(); +}); + +})(); diff --git a/server/template/common.go b/server/template/common.go new file mode 100644 index 00000000..5423918d --- /dev/null +++ b/server/template/common.go @@ -0,0 +1,111 @@ +// Code generated by go generate; DO NOT EDIT. +// 2017-11-19 22:01:21.924938666 -0800 PST m=+0.005771809 + +package template + +var templateCommonMap = map[string]string{ + "entry_pagination": `{{ define "entry_pagination" }} + +{{ end }}`, + "layout": `{{ define "base" }} + + + + + + + + + {{ if .csrf }} + + {{ end }} + {{template "title" .}} - Miniflux + {{ if .user }} + + {{ else }} + + {{ end }} + + + + {{ if .user }} +
+ +
+ {{ end }} +
+ {{template "content" .}} +
+ + +{{ end }}`, + "pagination": `{{ define "pagination" }} + +{{ end }} +`, +} + +var templateCommonMapChecksums = map[string]string{ + "entry_pagination": "f1465fa70f585ae8043b200ec9de5bf437ffbb0c19fb7aefc015c3555614ee27", + "layout": "8be69cc93fdc99eb36841ae645f58488bd675670507dcdb2de0e593602893178", + "pagination": "6ff462c2b2a53bc5448b651da017f40a39f1d4f16cef4b2f09784f0797286924", +} diff --git a/server/template/helper/LICENSE b/server/template/helper/LICENSE new file mode 100644 index 00000000..036a2a16 --- /dev/null +++ b/server/template/helper/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Hervé GOUCHET + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/server/template/helper/elapsed.go b/server/template/helper/elapsed.go new file mode 100644 index 00000000..bc312062 --- /dev/null +++ b/server/template/helper/elapsed.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Hervé Gouchet. All rights reserved. +// Use of this source code is governed by the MIT License +// that can be found in the LICENSE file. + +package helper + +import ( + "github.com/miniflux/miniflux2/locale" + "math" + "time" +) + +// Texts to be translated if necessary. +var ( + NotYet = `not yet` + JustNow = `just now` + LastMinute = `1 minute ago` + Minutes = `%d minutes ago` + LastHour = `1 hour ago` + Hours = `%d hours ago` + Yesterday = `yesterday` + Days = `%d days ago` + Weeks = `%d weeks ago` + Months = `%d months ago` + Years = `%d years ago` +) + +// GetElapsedTime returns in a human readable format the elapsed time +// since the given datetime. 
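+//
+// A minimal usage sketch (illustrative only; it mirrors elapsed_test.go below,
+// which passes a zero-value locale so the default English strings are returned):
+//
+//	GetElapsedTime(&locale.Language{}, time.Now().Add(-3*time.Hour))  // "3 hours ago"
+//	GetElapsedTime(&locale.Language{}, time.Now().Add(-48*time.Hour)) // "2 days ago"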
+func GetElapsedTime(translator *locale.Language, t time.Time) string { + if t.IsZero() || time.Now().Before(t) { + return translator.Get(NotYet) + } + diff := time.Since(t) + // Duration in seconds + s := diff.Seconds() + // Duration in days + d := int(s / 86400) + switch { + case s < 60: + return translator.Get(JustNow) + case s < 120: + return translator.Get(LastMinute) + case s < 3600: + return translator.Get(Minutes, int(diff.Minutes())) + case s < 7200: + return translator.Get(LastHour) + case s < 86400: + return translator.Get(Hours, int(diff.Hours())) + case d == 1: + return translator.Get(Yesterday) + case d < 7: + return translator.Get(Days, d) + case d < 31: + return translator.Get(Weeks, int(math.Ceil(float64(d)/7))) + case d < 365: + return translator.Get(Months, int(math.Ceil(float64(d)/30))) + default: + return translator.Get(Years, int(math.Ceil(float64(d)/365))) + } +} diff --git a/server/template/helper/elapsed_test.go b/server/template/helper/elapsed_test.go new file mode 100644 index 00000000..67b8d6b7 --- /dev/null +++ b/server/template/helper/elapsed_test.go @@ -0,0 +1,37 @@ +// Copyright (c) 2017 Hervé Gouchet. All rights reserved. +// Use of this source code is governed by the MIT License +// that can be found in the LICENSE file. + +package helper + +import ( + "fmt" + "github.com/miniflux/miniflux2/locale" + "testing" + "time" +) + +func TestElapsedTime(t *testing.T) { + var dt = []struct { + in time.Time + out string + }{ + {time.Time{}, NotYet}, + {time.Now().Add(time.Hour), NotYet}, + {time.Now(), JustNow}, + {time.Now().Add(-time.Minute), LastMinute}, + {time.Now().Add(-time.Minute * 40), fmt.Sprintf(Minutes, 40)}, + {time.Now().Add(-time.Hour), LastHour}, + {time.Now().Add(-time.Hour * 3), fmt.Sprintf(Hours, 3)}, + {time.Now().Add(-time.Hour * 32), Yesterday}, + {time.Now().Add(-time.Hour * 24 * 3), fmt.Sprintf(Days, 3)}, + {time.Now().Add(-time.Hour * 24 * 14), fmt.Sprintf(Weeks, 2)}, + {time.Now().Add(-time.Hour * 24 * 60), fmt.Sprintf(Months, 2)}, + {time.Now().Add(-time.Hour * 24 * 365 * 3), fmt.Sprintf(Years, 3)}, + } + for i, tt := range dt { + if out := GetElapsedTime(&locale.Language{}, tt.in); out != tt.out { + t.Errorf("%d. content mismatch for %v:exp=%q got=%q", i, tt.in, tt.out, out) + } + } +} diff --git a/server/template/html/about.html b/server/template/html/about.html new file mode 100644 index 00000000..3596327f --- /dev/null +++ b/server/template/html/about.html @@ -0,0 +1,37 @@ +{{ define "title"}}{{ t "About" }}{{ end }} + +{{ define "content"}} + + +
+

{{ t "Version" }}

+
    +
  • {{ t "Version:" }} {{ .version }}
  • +
  • {{ t "Build Date:" }} {{ .build_date }}
  • +
+
+ +
+

{{ t "Authors" }}

+
    +
  • {{ t "Author:" }} Frédéric Guillot
  • +
  • {{ t "License:" }} Apache 2.0
  • +
+
+ +{{ end }} diff --git a/server/template/html/add_subscription.html b/server/template/html/add_subscription.html new file mode 100644 index 00000000..99d9e071 --- /dev/null +++ b/server/template/html/add_subscription.html @@ -0,0 +1,45 @@ +{{ define "title"}}{{ t "New Subscription" }}{{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category. You must have at least one category." }}

+{{ else }} +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + +
+ +
+
+{{ end }} + +{{ end }} diff --git a/server/template/html/categories.html b/server/template/html/categories.html new file mode 100644 index 00000000..88b0ebea --- /dev/null +++ b/server/template/html/categories.html @@ -0,0 +1,50 @@ +{{ define "title"}}{{ t "Categories" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category." }}

+{{ else }} +
+ {{ range .categories }} +
+
+ + {{ .Title }} + +
+
+
    +
  • + {{ if eq .FeedCount 0 }} + {{ t "No feed." }} + {{ else }} + {{ plural "plural.categories.feed_count" .FeedCount .FeedCount }} + {{ end }} +
  • +
+ +
+
+ {{ end }} +
+{{ end }} + +{{ end }} diff --git a/server/template/html/category_entries.html b/server/template/html/category_entries.html new file mode 100644 index 00000000..d36a5ee5 --- /dev/null +++ b/server/template/html/category_entries.html @@ -0,0 +1,47 @@ +{{ define "title"}}{{ .category.Title }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no article in this category." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} diff --git a/server/template/html/choose_subscription.html b/server/template/html/choose_subscription.html new file mode 100644 index 00000000..72c68fd1 --- /dev/null +++ b/server/template/html/choose_subscription.html @@ -0,0 +1,36 @@ +{{ define "title"}}{{ t "Choose a Subscription" }}{{ end }} + +{{ define "content"}} + + +
+ + + +

{{ t "Choose a Subscription" }}

+ + {{ range .subscriptions }} +
+ ({{ .Type }}) + {{ .URL }} +
+ {{ end }} + +
+ +
+
+{{ end }} diff --git a/server/template/html/common/entry_pagination.html b/server/template/html/common/entry_pagination.html new file mode 100644 index 00000000..6c9f29cb --- /dev/null +++ b/server/template/html/common/entry_pagination.html @@ -0,0 +1,19 @@ +{{ define "entry_pagination" }} + +{{ end }} \ No newline at end of file diff --git a/server/template/html/common/layout.html b/server/template/html/common/layout.html new file mode 100644 index 00000000..defa3c9b --- /dev/null +++ b/server/template/html/common/layout.html @@ -0,0 +1,59 @@ +{{ define "base" }} + + + + + + + + + {{ if .csrf }} + + {{ end }} + {{template "title" .}} - Miniflux + {{ if .user }} + + {{ else }} + + {{ end }} + + + + {{ if .user }} +
+ +
+ {{ end }} +
+ {{template "content" .}} +
+ + +{{ end }} \ No newline at end of file diff --git a/server/template/html/common/pagination.html b/server/template/html/common/pagination.html new file mode 100644 index 00000000..4c6766a9 --- /dev/null +++ b/server/template/html/common/pagination.html @@ -0,0 +1,19 @@ +{{ define "pagination" }} + +{{ end }} diff --git a/server/template/html/create_category.html b/server/template/html/create_category.html new file mode 100644 index 00000000..7c4c93f1 --- /dev/null +++ b/server/template/html/create_category.html @@ -0,0 +1,27 @@ +{{ define "title"}}{{ t "New Category" }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} diff --git a/server/template/html/create_user.html b/server/template/html/create_user.html new file mode 100644 index 00000000..36af356f --- /dev/null +++ b/server/template/html/create_user.html @@ -0,0 +1,41 @@ +{{ define "title"}}{{ t "New User" }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + + + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} diff --git a/server/template/html/edit_category.html b/server/template/html/edit_category.html new file mode 100644 index 00000000..2981fa46 --- /dev/null +++ b/server/template/html/edit_category.html @@ -0,0 +1,30 @@ +{{ define "title"}}{{ t "Edit Category: %s" .category.Title }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} diff --git a/server/template/html/edit_feed.html b/server/template/html/edit_feed.html new file mode 100644 index 00000000..fac2a9b7 --- /dev/null +++ b/server/template/html/edit_feed.html @@ -0,0 +1,61 @@ +{{ define "title"}}{{ t "Edit Feed: %s" .feed.Title }}{{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category!" }}

+{{ else }} + {{ if ne .feed.ParsingErrorCount 0 }} +
+

{{ t "Last Parsing Error" }}

+ {{ .feed.ParsingErrorMsg }} +
+ {{ end }} + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + + + + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} + +{{ end }} \ No newline at end of file diff --git a/server/template/html/edit_user.html b/server/template/html/edit_user.html new file mode 100644 index 00000000..8f63307d --- /dev/null +++ b/server/template/html/edit_user.html @@ -0,0 +1,44 @@ +{{ define "title"}}{{ t "Edit user: %s" .selected_user.Username }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + + + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} diff --git a/server/template/html/entry.html b/server/template/html/entry.html new file mode 100644 index 00000000..3bb296f2 --- /dev/null +++ b/server/template/html/entry.html @@ -0,0 +1,75 @@ +{{ define "title"}}{{ .entry.Title }}{{ end }} + +{{ define "content"}} +
+
+

+ {{ .entry.Title }} +

+ + +
+
+ {{ template "entry_pagination" . }} +
+
+ {{ noescape (proxyFilter .entry.Content) }} +
+ {{ if .entry.Enclosures }} + + {{ end }} +
+ +
+ {{ template "entry_pagination" . }} +
+{{ end }} diff --git a/server/template/html/feed_entries.html b/server/template/html/feed_entries.html new file mode 100644 index 00000000..5028df4c --- /dev/null +++ b/server/template/html/feed_entries.html @@ -0,0 +1,58 @@ +{{ define "title"}}{{ .feed.Title }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if ne .feed.ParsingErrorCount 0 }} +
+

{{ t "There is a problem with this feed" }}

+ {{ .feed.ParsingErrorMsg }} +
+{{ else if not .entries }} +

{{ t "There is no article for this feed." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} diff --git a/server/template/html/feeds.html b/server/template/html/feeds.html new file mode 100644 index 00000000..d7537540 --- /dev/null +++ b/server/template/html/feeds.html @@ -0,0 +1,65 @@ +{{ define "title"}}{{ t "Feeds" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .feeds }} +

{{ t "You don't have any subscription." }}

+{{ else }} +
+ {{ range .feeds }} + + {{ end }} +
+{{ end }} + +{{ end }} diff --git a/server/template/html/history.html b/server/template/html/history.html new file mode 100644 index 00000000..a344da1e --- /dev/null +++ b/server/template/html/history.html @@ -0,0 +1,42 @@ +{{ define "title"}}{{ t "History" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no history at the moment." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} diff --git a/server/template/html/import.html b/server/template/html/import.html new file mode 100644 index 00000000..dbdb9b0e --- /dev/null +++ b/server/template/html/import.html @@ -0,0 +1,34 @@ +{{ define "title"}}{{ t "Import" }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + +
+ +
+
+ +{{ end }} diff --git a/server/template/html/login.html b/server/template/html/login.html new file mode 100644 index 00000000..07a32126 --- /dev/null +++ b/server/template/html/login.html @@ -0,0 +1,23 @@ +{{ define "title"}}{{ t "Sign In" }}{{ end }} + +{{ define "content"}} + +{{ end }} diff --git a/server/template/html/sessions.html b/server/template/html/sessions.html new file mode 100644 index 00000000..048719e3 --- /dev/null +++ b/server/template/html/sessions.html @@ -0,0 +1,42 @@ +{{ define "title"}}{{ t "Sessions" }}{{ end }} + +{{ define "content"}} + + + + + + + + + + {{ range .sessions }} + + + + + + + {{ end }} +
{{ t "Date" }}{{ t "IP Address" }}{{ t "User Agent" }}{{ t "Actions" }}
{{ elapsed .CreatedAt }}{{ .IP }}{{ .UserAgent }} + {{ if eq .Token $.currentSessionToken }} + {{ t "Current session" }} + {{ else }} + {{ t "Remove" }} + {{ end }} +
+ +{{ end }} diff --git a/server/template/html/settings.html b/server/template/html/settings.html new file mode 100644 index 00000000..f916708b --- /dev/null +++ b/server/template/html/settings.html @@ -0,0 +1,63 @@ +{{ define "title"}}{{ t "Settings" }}{{ end }} + +{{ define "content"}} + + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +{{ end }} diff --git a/server/template/html/unread.html b/server/template/html/unread.html new file mode 100644 index 00000000..413965ec --- /dev/null +++ b/server/template/html/unread.html @@ -0,0 +1,47 @@ +{{ define "title"}}{{ t "Unread Items" }} {{ if gt .countUnread 0 }}({{ .countUnread }}){{ end }} {{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no unread article." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} \ No newline at end of file diff --git a/server/template/html/users.html b/server/template/html/users.html new file mode 100644 index 00000000..69acd008 --- /dev/null +++ b/server/template/html/users.html @@ -0,0 +1,51 @@ +{{ define "title"}}{{ t "Users" }}{{ end }} + +{{ define "content"}} + + +{{ if eq (len .users) 1 }} +

{{ t "You are the only user." }}

+{{ else }} + + + + + + + + {{ range .users }} + {{ if ne .ID $.user.ID }} + + + + + + + {{ end }} + {{ end }} +
{{ t "Username" }}{{ t "Administrator" }}{{ t "Last Login" }}{{ t "Actions" }}
{{ .Username }}{{ if eq .IsAdmin true }}{{ t "Yes" }}{{ else }}{{ t "No" }}{{ end }} + {{ if .LastLoginAt }} + + {{ else }} + {{ t "Never" }} + {{ end }} + + {{ t "Edit" }}, + {{ t "Remove" }} +
+{{ end }} + +{{ end }} diff --git a/server/template/template.go b/server/template/template.go new file mode 100644 index 00000000..086cdc5c --- /dev/null +++ b/server/template/template.go @@ -0,0 +1,117 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package template + +import ( + "bytes" + "github.com/miniflux/miniflux2/errors" + "github.com/miniflux/miniflux2/locale" + "github.com/miniflux/miniflux2/server/route" + "github.com/miniflux/miniflux2/server/template/helper" + "github.com/miniflux/miniflux2/server/ui/filter" + "html/template" + "io" + "log" + "net/url" + "strings" + "time" + + "github.com/gorilla/mux" +) + +type TemplateEngine struct { + templates map[string]*template.Template + router *mux.Router + translator *locale.Translator + currentLocale *locale.Language +} + +func (t *TemplateEngine) ParseAll() { + funcMap := template.FuncMap{ + "route": func(name string, args ...interface{}) string { + return route.GetRoute(t.router, name, args...) + }, + "noescape": func(str string) template.HTML { + return template.HTML(str) + }, + "proxyFilter": func(data string) string { + return filter.ImageProxyFilter(t.router, data) + }, + "domain": func(websiteURL string) string { + parsedURL, err := url.Parse(websiteURL) + if err != nil { + return websiteURL + } + + return parsedURL.Host + }, + "hasPrefix": func(str, prefix string) bool { + return strings.HasPrefix(str, prefix) + }, + "contains": func(str, substr string) bool { + return strings.Contains(str, substr) + }, + "isodate": func(ts time.Time) string { + return ts.Format("2006-01-02 15:04:05") + }, + "elapsed": func(ts time.Time) string { + return helper.GetElapsedTime(t.currentLocale, ts) + }, + "t": func(key interface{}, args ...interface{}) string { + switch key.(type) { + case string, error: + return t.currentLocale.Get(key.(string), args...) + case errors.LocalizedError: + err := key.(errors.LocalizedError) + return err.Localize(t.currentLocale) + default: + return "" + } + }, + "plural": func(key string, n int, args ...interface{}) string { + return t.currentLocale.Plural(key, n, args...) + }, + } + + commonTemplates := "" + for _, content := range templateCommonMap { + commonTemplates += content + } + + for name, content := range templateViewsMap { + log.Println("Parsing template:", name) + t.templates[name] = template.Must(template.New("main").Funcs(funcMap).Parse(commonTemplates + content)) + } +} + +func (t *TemplateEngine) SetLanguage(language string) { + t.currentLocale = t.translator.GetLanguage(language) +} + +func (t *TemplateEngine) Execute(w io.Writer, name string, data interface{}) { + tpl, ok := t.templates[name] + if !ok { + log.Fatalf("The template %s does not exists.\n", name) + } + + var b bytes.Buffer + err := tpl.ExecuteTemplate(&b, "base", data) + if err != nil { + log.Fatalf("Unable to render template: %v\n", err) + } + + b.WriteTo(w) +} + +func NewTemplateEngine(router *mux.Router, translator *locale.Translator) *TemplateEngine { + tpl := &TemplateEngine{ + templates: make(map[string]*template.Template), + router: router, + translator: translator, + } + + tpl.ParseAll() + return tpl +} diff --git a/server/template/views.go b/server/template/views.go new file mode 100644 index 00000000..2f8319e9 --- /dev/null +++ b/server/template/views.go @@ -0,0 +1,966 @@ +// Code generated by go generate; DO NOT EDIT. 
+// 2017-11-19 22:01:21.923713128 -0800 PST m=+0.004546271 + +package template + +var templateViewsMap = map[string]string{ + "about": `{{ define "title"}}{{ t "About" }}{{ end }} + +{{ define "content"}} + + +
+

{{ t "Version" }}

+
    +
  • {{ t "Version:" }} {{ .version }}
  • +
  • {{ t "Build Date:" }} {{ .build_date }}
  • +
+
+ +
+

{{ t "Authors" }}

+
    +
  • {{ t "Author:" }} Frédéric Guillot
  • +
  • {{ t "License:" }} Apache 2.0
  • +
+
+ +{{ end }} +`, + "add_subscription": `{{ define "title"}}{{ t "New Subscription" }}{{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category. You must have at least one category." }}

+{{ else }} +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + +
+ +
+
+{{ end }} + +{{ end }} +`, + "categories": `{{ define "title"}}{{ t "Categories" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category." }}

+{{ else }} +
+ {{ range .categories }} +
+
+ + {{ .Title }} + +
+
+
    +
  • + {{ if eq .FeedCount 0 }} + {{ t "No feed." }} + {{ else }} + {{ plural "plural.categories.feed_count" .FeedCount .FeedCount }} + {{ end }} +
  • +
+ +
+
+ {{ end }} +
+{{ end }} + +{{ end }} +`, + "category_entries": `{{ define "title"}}{{ .category.Title }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no article in this category." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} +`, + "choose_subscription": `{{ define "title"}}{{ t "Choose a Subscription" }}{{ end }} + +{{ define "content"}} + + +
+    {{ t "Choose a Subscription" }}
+    {{ range .subscriptions }}
+        ({{ .Type }}) {{ .URL }}
+    {{ end }}
+{{ end }} +`, + "create_category": `{{ define "title"}}{{ t "New Category" }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+    {{ t "or" }} {{ t "cancel" }}
+{{ end }} +`, + "create_user": `{{ define "title"}}{{ t "New User" }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+    {{ t "or" }} {{ t "cancel" }}
+{{ end }} +`, + "edit_category": `{{ define "title"}}{{ t "Edit Category: %s" .category.Title }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+    {{ t "or" }} {{ t "cancel" }}
+{{ end }} +`, + "edit_feed": `{{ define "title"}}{{ t "Edit Feed: %s" .feed.Title }}{{ end }} + +{{ define "content"}} + + +{{ if not .categories }} +

{{ t "There is no category!" }}

+{{ else }} + {{ if ne .feed.ParsingErrorCount 0 }} +
+

{{ t "Last Parsing Error" }}

+ {{ .feed.ParsingErrorMsg }} +
+ {{ end }} + +
+ + + {{ if .errorMessage }} +
{{ t .errorMessage }}
+ {{ end }} + + + + + + + + + + + + + +
+ {{ t "or" }} {{ t "cancel" }} +
+
+{{ end }} + +{{ end }}`, + "edit_user": `{{ define "title"}}{{ t "Edit user: %s" .selected_user.Username }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+    {{ t "or" }} {{ t "cancel" }}
+{{ end }} +`, + "entry": `{{ define "title"}}{{ .entry.Title }}{{ end }} + +{{ define "content"}} +
+    {{ .entry.Title }}
+    {{ template "entry_pagination" . }}
+    {{ noescape (proxyFilter .entry.Content) }}
+    {{ if .entry.Enclosures }}
+    {{ end }}
+    {{ template "entry_pagination" . }}
+{{ end }} +`, + "feed_entries": `{{ define "title"}}{{ .feed.Title }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if ne .feed.ParsingErrorCount 0 }} +
+    {{ t "There is a problem with this feed" }}
+    {{ .feed.ParsingErrorMsg }}
+{{ else if not .entries }}
+    {{ t "There is no article for this feed." }}
+{{ else }}
+    {{ range .entries }}
+    {{ end }}
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} +`, + "feeds": `{{ define "title"}}{{ t "Feeds" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .feeds }} +

{{ t "You don't have any subscription." }}

+{{ else }} +
+ {{ range .feeds }} + + {{ end }} +
+{{ end }} + +{{ end }} +`, + "history": `{{ define "title"}}{{ t "History" }} ({{ .total }}){{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no history at the moment." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }} +`, + "import": `{{ define "title"}}{{ t "Import" }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+ +{{ end }} +`, + "login": `{{ define "title"}}{{ t "Sign In" }}{{ end }} + +{{ define "content"}} + +{{ end }} +`, + "sessions": `{{ define "title"}}{{ t "Sessions" }}{{ end }} + +{{ define "content"}} + + + + + + + + + + {{ range .sessions }} + + + + + + + {{ end }} +
{{ t "Date" }}{{ t "IP Address" }}{{ t "User Agent" }}{{ t "Actions" }}
{{ elapsed .CreatedAt }}{{ .IP }}{{ .UserAgent }} + {{ if eq .Token $.currentSessionToken }} + {{ t "Current session" }} + {{ else }} + {{ t "Remove" }} + {{ end }} +
+ +{{ end }} +`, + "settings": `{{ define "title"}}{{ t "Settings" }}{{ end }} + +{{ define "content"}} + + +
+    {{ if .errorMessage }}
+        {{ t .errorMessage }}
+    {{ end }}
+ +{{ end }} +`, + "unread": `{{ define "title"}}{{ t "Unread Items" }} {{ if gt .countUnread 0 }}({{ .countUnread }}){{ end }} {{ end }} + +{{ define "content"}} + + +{{ if not .entries }} +

{{ t "There is no unread article." }}

+{{ else }} +
+ {{ range .entries }} + + {{ end }} +
+ {{ template "pagination" .pagination }} +{{ end }} + +{{ end }}`, + "users": `{{ define "title"}}{{ t "Users" }}{{ end }} + +{{ define "content"}} + + +{{ if eq (len .users) 1 }} +

{{ t "You are the only user." }}

+{{ else }} + + + + + + + + {{ range .users }} + {{ if ne .ID $.user.ID }} + + + + + + + {{ end }} + {{ end }} +
{{ t "Username" }}{{ t "Administrator" }}{{ t "Last Login" }}{{ t "Actions" }}
{{ .Username }}{{ if eq .IsAdmin true }}{{ t "Yes" }}{{ else }}{{ t "No" }}{{ end }} + {{ if .LastLoginAt }} + + {{ else }} + {{ t "Never" }} + {{ end }} + + {{ t "Edit" }}, + {{ t "Remove" }} +
+{{ end }} + +{{ end }} +`, +} + +var templateViewsMapChecksums = map[string]string{ + "about": "56f1d45d8b9944306c66be0712320527e739a0ce4fccbd97a4c414c8f9cfab04", + "add_subscription": "098ea9e492e18242bd414b22c4d8638006d113f728e5ae78c9186663f60ae3f1", + "categories": "721b6bae6aa6461f4e020d667707fabe53c94b399f7d74febef2de5eb9f15071", + "category_entries": "0bdcf28ef29b976b78d1add431896a8c56791476abd7a4240998d52c3efe1f35", + "choose_subscription": "d37682743d8bbd84738a964e238103db2651f95fa340c6e285ffe2e12548d673", + "create_category": "2b82af5d2dcd67898dc5daa57a6461e6ff8121a6089b2a2a1be909f35e4a2275", + "create_user": "966b31d0414e0d0a547ef9ada428cbd24a91100bfed491f780c0461892a2489b", + "edit_category": "cee720faadcec58289b707ad30af623d2ee66c1ce23a732965463250d7ff41c5", + "edit_feed": "c5bc4c22bf7e8348d880395250545595d21fb8c8e723fc5d7cca68e25d250884", + "edit_user": "f0f79704983de3ca7858bd8cda7a372c3999f5e4e0cf951fba5fa2c1752f9111", + "entry": "32e605edd6d43773ac31329d247ebd81d38d974cd43689d91de79fffec7fe04b", + "feed_entries": "9aff923b6c7452dec1514feada7e0d2bbc1ec21c6f5e9f48b2de41d1b731ffe4", + "feeds": "ddcf12a47c850e6a1f3b85c9ab6566b4e45adfcd7a3546381a0c3a7a54f2b7d4", + "history": "439000d0be8fd716f3b89860af4d721e05baef0c2ccd2325ba020c940d6aa847", + "import": "73b5112e20bfd232bf73334544186ea419505936bc237d481517a8622901878f", + "login": "568f2f69f248048f3e55e9bbc719077a74ae23fe18f237aa40e3de37e97b7a41", + "sessions": "7fcd3bb794d4ad01eb9fa515660f04c8e79e1568970fd541cc7b2de8a76e1542", + "settings": "9c89bfd70ff288b4256e5205be78a7645450b364db1df51d10fee3cb915b2c6b", + "unread": "b6f9be1a72188947c75a6fdcac6ff7878db7745f9efa46318e0433102892a722", + "users": "5bd535de3e46d9b14667d8159a5ec1478d6e028a77bf306c89d7b55813eeb625", +} diff --git a/server/ui/controller/about.go b/server/ui/controller/about.go new file mode 100644 index 00000000..dcfe0d7a --- /dev/null +++ b/server/ui/controller/about.go @@ -0,0 +1,24 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/version" +) + +func (c *Controller) AboutPage(ctx *core.Context, request *core.Request, response *core.Response) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("about", args.Merge(tplParams{ + "version": version.Version, + "build_date": version.BuildDate, + "menu": "settings", + })) +} diff --git a/server/ui/controller/category.go b/server/ui/controller/category.go new file mode 100644 index 00000000..dbc80671 --- /dev/null +++ b/server/ui/controller/category.go @@ -0,0 +1,228 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
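+// The category controller below implements the category screens: listing
+// categories with their feed counts, listing the entries of one category,
+// and the create/edit/remove flows. getCategoryFromURL and
+// getCategoryFormTemplateArgs are shared helpers for URL parsing and
+// template arguments.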
+ +package controller + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" +) + +func (c *Controller) ShowCategories(ctx *core.Context, request *core.Request, response *core.Response) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + user := ctx.GetLoggedUser() + categories, err := c.store.GetCategoriesWithFeedCount(user.ID) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("categories", args.Merge(tplParams{ + "categories": categories, + "total": len(categories), + "menu": "categories", + })) +} + +func (c *Controller) ShowCategoryEntries(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + offset := request.GetQueryIntegerParam("offset", 0) + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + category, err := c.getCategoryFromURL(ctx, request, response) + if err != nil { + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithCategoryID(category.ID) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + builder.WithOffset(offset) + builder.WithLimit(NbItemsPerPage) + + entries, err := builder.GetEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + count, err := builder.CountEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("category_entries", args.Merge(tplParams{ + "category": category, + "entries": entries, + "total": count, + "pagination": c.getPagination(ctx.GetRoute("categoryEntries", "categoryID", category.ID), count, offset), + "menu": "categories", + })) +} + +func (c *Controller) CreateCategory(ctx *core.Context, request *core.Request, response *core.Response) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("create_category", args.Merge(tplParams{ + "menu": "categories", + })) +} + +func (c *Controller) SaveCategory(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + categoryForm := form.NewCategoryForm(request.GetRequest()) + if err := categoryForm.Validate(); err != nil { + response.Html().Render("create_category", args.Merge(tplParams{ + "errorMessage": err.Error(), + })) + return + } + + category := model.Category{Title: categoryForm.Title, UserID: user.ID} + err = c.store.CreateCategory(&category) + if err != nil { + log.Println(err) + response.Html().Render("create_category", args.Merge(tplParams{ + "errorMessage": "Unable to create this category.", + })) + return + } + + response.Redirect(ctx.GetRoute("categories")) +} + +func (c *Controller) EditCategory(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + category, err := c.getCategoryFromURL(ctx, request, response) + if err != nil { + log.Println(err) + return + } + + args, err := c.getCategoryFormTemplateArgs(ctx, user, category, nil) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("edit_category", args) +} + +func (c *Controller) UpdateCategory(ctx *core.Context, request *core.Request, 
response *core.Response) { + user := ctx.GetLoggedUser() + + category, err := c.getCategoryFromURL(ctx, request, response) + if err != nil { + log.Println(err) + return + } + + categoryForm := form.NewCategoryForm(request.GetRequest()) + args, err := c.getCategoryFormTemplateArgs(ctx, user, category, categoryForm) + if err != nil { + response.Html().ServerError(err) + return + } + + if err := categoryForm.Validate(); err != nil { + response.Html().Render("edit_category", args.Merge(tplParams{ + "errorMessage": err.Error(), + })) + return + } + + err = c.store.UpdateCategory(categoryForm.Merge(category)) + if err != nil { + log.Println(err) + response.Html().Render("edit_category", args.Merge(tplParams{ + "errorMessage": "Unable to update this category.", + })) + return + } + + response.Redirect(ctx.GetRoute("categories")) +} + +func (c *Controller) RemoveCategory(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + category, err := c.getCategoryFromURL(ctx, request, response) + if err != nil { + return + } + + if err := c.store.RemoveCategory(user.ID, category.ID); err != nil { + response.Html().ServerError(err) + return + } + + response.Redirect(ctx.GetRoute("categories")) +} + +func (c *Controller) getCategoryFromURL(ctx *core.Context, request *core.Request, response *core.Response) (*model.Category, error) { + categoryID, err := request.GetIntegerParam("categoryID") + if err != nil { + response.Html().BadRequest(err) + return nil, err + } + + user := ctx.GetLoggedUser() + category, err := c.store.GetCategory(user.ID, categoryID) + if err != nil { + response.Html().ServerError(err) + return nil, err + } + + if category == nil { + response.Html().NotFound() + return nil, errors.New("Category not found") + } + + return category, nil +} + +func (c *Controller) getCategoryFormTemplateArgs(ctx *core.Context, user *model.User, category *model.Category, categoryForm *form.CategoryForm) (tplParams, error) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + return nil, err + } + + if categoryForm == nil { + args["form"] = form.CategoryForm{ + Title: category.Title, + } + } else { + args["form"] = categoryForm + } + + args["category"] = category + args["menu"] = "categories" + return args, nil +} diff --git a/server/ui/controller/controller.go b/server/ui/controller/controller.go new file mode 100644 index 00000000..aad32582 --- /dev/null +++ b/server/ui/controller/controller.go @@ -0,0 +1,56 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
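+// tplParams is the generic bag of values handed to the template engine, and
+// Merge copies another map into it so handlers can chain calls. A minimal
+// usage sketch, mirroring (and simplifying) about.go from this commit rather
+// than introducing any new API:
+//
+//	args, err := c.getCommonTemplateArgs(ctx)
+//	if err != nil {
+//		response.Html().ServerError(err)
+//		return
+//	}
+//	response.Html().Render("about", args.Merge(tplParams{"menu": "settings"}))
+//
+// getCommonTemplateArgs also precomputes the unread counter and CSRF token
+// shared by every page.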
+ +package controller + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/feed" + "github.com/miniflux/miniflux2/reader/opml" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/storage" +) + +type tplParams map[string]interface{} + +func (t tplParams) Merge(d tplParams) tplParams { + for k, v := range d { + t[k] = v + } + + return t +} + +type Controller struct { + store *storage.Storage + feedHandler *feed.Handler + opmlHandler *opml.OpmlHandler +} + +func (c *Controller) getCommonTemplateArgs(ctx *core.Context) (tplParams, error) { + user := ctx.GetLoggedUser() + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusUnread) + + countUnread, err := builder.CountEntries() + if err != nil { + return nil, err + } + + params := tplParams{ + "menu": "", + "user": user, + "countUnread": countUnread, + "csrf": ctx.GetCsrfToken(), + } + return params, nil +} + +func NewController(store *storage.Storage, feedHandler *feed.Handler, opmlHandler *opml.OpmlHandler) *Controller { + return &Controller{ + store: store, + feedHandler: feedHandler, + opmlHandler: opmlHandler, + } +} diff --git a/server/ui/controller/entry.go b/server/ui/controller/entry.go new file mode 100644 index 00000000..5a3a979c --- /dev/null +++ b/server/ui/controller/entry.go @@ -0,0 +1,375 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/payload" + "log" +) + +func (c *Controller) ShowFeedEntry(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + sortingDirection := model.DefaultSortingDirection + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Html().BadRequest(err) + return + } + + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Html().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithFeedID(feedID) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + if entry == nil { + response.Html().NotFound() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithFeedID(feedID) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", "<=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + nextEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithFeedID(feedID) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", ">=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.GetOppositeDirection(sortingDirection)) + prevEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + nextEntryRoute := "" + if nextEntry != nil { + nextEntryRoute = ctx.GetRoute("feedEntry", "feedID", feedID, "entryID", nextEntry.ID) 
+ } + + prevEntryRoute := "" + if prevEntry != nil { + prevEntryRoute = ctx.GetRoute("feedEntry", "feedID", feedID, "entryID", prevEntry.ID) + } + + if entry.Status == model.EntryStatusUnread { + err = c.store.SetEntriesStatus(user.ID, []int64{entry.ID}, model.EntryStatusRead) + if err != nil { + log.Println(err) + response.Html().ServerError(nil) + return + } + } + + response.Html().Render("entry", args.Merge(tplParams{ + "entry": entry, + "prevEntry": prevEntry, + "nextEntry": nextEntry, + "nextEntryRoute": nextEntryRoute, + "prevEntryRoute": prevEntryRoute, + "menu": "feeds", + })) +} + +func (c *Controller) ShowCategoryEntry(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + sortingDirection := model.DefaultSortingDirection + + categoryID, err := request.GetIntegerParam("categoryID") + if err != nil { + response.Html().BadRequest(err) + return + } + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Html().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithCategoryID(categoryID) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + if entry == nil { + response.Html().NotFound() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithCategoryID(categoryID) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", "<=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(sortingDirection) + nextEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithCategoryID(categoryID) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", ">=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.GetOppositeDirection(sortingDirection)) + prevEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + nextEntryRoute := "" + if nextEntry != nil { + nextEntryRoute = ctx.GetRoute("categoryEntry", "categoryID", categoryID, "entryID", nextEntry.ID) + } + + prevEntryRoute := "" + if prevEntry != nil { + prevEntryRoute = ctx.GetRoute("categoryEntry", "categoryID", categoryID, "entryID", prevEntry.ID) + } + + if entry.Status == model.EntryStatusUnread { + err = c.store.SetEntriesStatus(user.ID, []int64{entry.ID}, model.EntryStatusRead) + if err != nil { + log.Println(err) + response.Html().ServerError(nil) + return + } + } + + response.Html().Render("entry", args.Merge(tplParams{ + "entry": entry, + "prevEntry": prevEntry, + "nextEntry": nextEntry, + "nextEntryRoute": nextEntryRoute, + "prevEntryRoute": prevEntryRoute, + "menu": "categories", + })) +} + +func (c *Controller) ShowUnreadEntry(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + sortingDirection := model.DefaultSortingDirection + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Html().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + 
return + } + + if entry == nil { + response.Html().NotFound() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusUnread) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", "<=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(sortingDirection) + nextEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusUnread) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", ">=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.GetOppositeDirection(sortingDirection)) + prevEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + nextEntryRoute := "" + if nextEntry != nil { + nextEntryRoute = ctx.GetRoute("unreadEntry", "entryID", nextEntry.ID) + } + + prevEntryRoute := "" + if prevEntry != nil { + prevEntryRoute = ctx.GetRoute("unreadEntry", "entryID", prevEntry.ID) + } + + if entry.Status == model.EntryStatusUnread { + err = c.store.SetEntriesStatus(user.ID, []int64{entry.ID}, model.EntryStatusRead) + if err != nil { + log.Println(err) + response.Html().ServerError(nil) + return + } + } + + response.Html().Render("entry", args.Merge(tplParams{ + "entry": entry, + "prevEntry": prevEntry, + "nextEntry": nextEntry, + "nextEntryRoute": nextEntryRoute, + "prevEntryRoute": prevEntryRoute, + "menu": "unread", + })) +} + +func (c *Controller) ShowReadEntry(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + sortingDirection := model.DefaultSortingDirection + + entryID, err := request.GetIntegerParam("entryID") + if err != nil { + response.Html().BadRequest(err) + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithEntryID(entryID) + + entry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + if entry == nil { + response.Html().NotFound() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusRead) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", "<=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(sortingDirection) + nextEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + builder = c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusRead) + builder.WithCondition("e.id", "!=", entryID) + builder.WithCondition("e.published_at", ">=", entry.Date) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.GetOppositeDirection(sortingDirection)) + prevEntry, err := builder.GetEntry() + if err != nil { + response.Html().ServerError(err) + return + } + + nextEntryRoute := "" + if nextEntry != nil { + nextEntryRoute = ctx.GetRoute("readEntry", "entryID", nextEntry.ID) + } + + prevEntryRoute := "" + if prevEntry != nil { + prevEntryRoute = ctx.GetRoute("readEntry", "entryID", prevEntry.ID) + } + + 
response.Html().Render("entry", args.Merge(tplParams{ + "entry": entry, + "prevEntry": prevEntry, + "nextEntry": nextEntry, + "nextEntryRoute": nextEntryRoute, + "prevEntryRoute": prevEntryRoute, + "menu": "history", + })) +} + +func (c *Controller) UpdateEntriesStatus(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + entryIDs, status, err := payload.DecodeEntryStatusPayload(request.GetBody()) + if err != nil { + log.Println(err) + response.Json().BadRequest(nil) + return + } + + if len(entryIDs) == 0 { + response.Html().BadRequest(errors.New("The list of entryID is empty")) + return + } + + err = c.store.SetEntriesStatus(user.ID, entryIDs, status) + if err != nil { + log.Println(err) + response.Html().ServerError(nil) + return + } + + response.Json().Standard("OK") +} diff --git a/server/ui/controller/feed.go b/server/ui/controller/feed.go new file mode 100644 index 00000000..400f81ad --- /dev/null +++ b/server/ui/controller/feed.go @@ -0,0 +1,209 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" +) + +func (c *Controller) ShowFeedsPage(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + feeds, err := c.store.GetFeeds(user.ID) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("feeds", args.Merge(tplParams{ + "feeds": feeds, + "total": len(feeds), + "menu": "feeds", + })) +} + +func (c *Controller) ShowFeedEntries(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + offset := request.GetQueryIntegerParam("offset", 0) + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + feed, err := c.getFeedFromURL(request, response, user) + if err != nil { + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithFeedID(feed.ID) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + builder.WithOffset(offset) + builder.WithLimit(NbItemsPerPage) + + entries, err := builder.GetEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + count, err := builder.CountEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("feed_entries", args.Merge(tplParams{ + "feed": feed, + "entries": entries, + "total": count, + "pagination": c.getPagination(ctx.GetRoute("feedEntries", "feedID", feed.ID), count, offset), + "menu": "feeds", + })) +} + +func (c *Controller) EditFeed(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + feed, err := c.getFeedFromURL(request, response, user) + if err != nil { + return + } + + args, err := c.getFeedFormTemplateArgs(ctx, user, feed, nil) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("edit_feed", args) +} + +func (c *Controller) UpdateFeed(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + feed, err := 
c.getFeedFromURL(request, response, user) + if err != nil { + return + } + + feedForm := form.NewFeedForm(request.GetRequest()) + args, err := c.getFeedFormTemplateArgs(ctx, user, feed, feedForm) + if err != nil { + response.Html().ServerError(err) + return + } + + if err := feedForm.ValidateModification(); err != nil { + response.Html().Render("edit_feed", args.Merge(tplParams{ + "errorMessage": err.Error(), + })) + return + } + + err = c.store.UpdateFeed(feedForm.Merge(feed)) + if err != nil { + log.Println(err) + response.Html().Render("edit_feed", args.Merge(tplParams{ + "errorMessage": "Unable to update this feed.", + })) + return + } + + response.Redirect(ctx.GetRoute("feeds")) +} + +func (c *Controller) RemoveFeed(ctx *core.Context, request *core.Request, response *core.Response) { + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Html().ServerError(err) + return + } + + user := ctx.GetLoggedUser() + if err := c.store.RemoveFeed(user.ID, feedID); err != nil { + response.Html().ServerError(err) + return + } + + response.Redirect(ctx.GetRoute("feeds")) +} + +func (c *Controller) RefreshFeed(ctx *core.Context, request *core.Request, response *core.Response) { + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Html().BadRequest(err) + return + } + + user := ctx.GetLoggedUser() + if err := c.feedHandler.RefreshFeed(user.ID, feedID); err != nil { + log.Println("[UI:RefreshFeed]", err) + } + + response.Redirect(ctx.GetRoute("feedEntries", "feedID", feedID)) +} + +func (c *Controller) getFeedFromURL(request *core.Request, response *core.Response, user *model.User) (*model.Feed, error) { + feedID, err := request.GetIntegerParam("feedID") + if err != nil { + response.Html().BadRequest(err) + return nil, err + } + + feed, err := c.store.GetFeedById(user.ID, feedID) + if err != nil { + response.Html().ServerError(err) + return nil, err + } + + if feed == nil { + response.Html().NotFound() + return nil, errors.New("Feed not found") + } + + return feed, nil +} + +func (c *Controller) getFeedFormTemplateArgs(ctx *core.Context, user *model.User, feed *model.Feed, feedForm *form.FeedForm) (tplParams, error) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + return nil, err + } + + categories, err := c.store.GetCategories(user.ID) + if err != nil { + return nil, err + } + + if feedForm == nil { + args["form"] = form.FeedForm{ + SiteURL: feed.SiteURL, + FeedURL: feed.FeedURL, + Title: feed.Title, + CategoryID: feed.Category.ID, + } + } else { + args["form"] = feedForm + } + + args["categories"] = categories + args["feed"] = feed + args["menu"] = "feeds" + return args, nil +} diff --git a/server/ui/controller/history.go b/server/ui/controller/history.go new file mode 100644 index 00000000..2c067373 --- /dev/null +++ b/server/ui/controller/history.go @@ -0,0 +1,47 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
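+// ShowHistoryPage below lists entries already marked as read: it builds an
+// entry query for EntryStatusRead, applies the default sort order and the
+// requested offset with a limit of NbItemsPerPage, and renders the "history"
+// view with pagination.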
+ +package controller + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" +) + +func (c *Controller) ShowHistoryPage(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + offset := request.GetQueryIntegerParam("offset", 0) + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusRead) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + builder.WithOffset(offset) + builder.WithLimit(NbItemsPerPage) + + entries, err := builder.GetEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + count, err := builder.CountEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("history", args.Merge(tplParams{ + "entries": entries, + "total": count, + "pagination": c.getPagination(ctx.GetRoute("history"), count, offset), + "menu": "history", + })) +} diff --git a/server/ui/controller/icon.go b/server/ui/controller/icon.go new file mode 100644 index 00000000..37954c24 --- /dev/null +++ b/server/ui/controller/icon.go @@ -0,0 +1,31 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "github.com/miniflux/miniflux2/server/core" + "time" +) + +func (c *Controller) ShowIcon(ctx *core.Context, request *core.Request, response *core.Response) { + iconID, err := request.GetIntegerParam("iconID") + if err != nil { + response.Html().BadRequest(err) + return + } + + icon, err := c.store.GetIconByID(iconID) + if err != nil { + response.Html().ServerError(err) + return + } + + if icon == nil { + response.Html().NotFound() + return + } + + response.Cache(icon.MimeType, icon.Hash, icon.Content, 72*time.Hour) +} diff --git a/server/ui/controller/login.go b/server/ui/controller/login.go new file mode 100644 index 00000000..225978c1 --- /dev/null +++ b/server/ui/controller/login.go @@ -0,0 +1,91 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
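+// The login controller below handles authentication: CheckLogin validates
+// the form, verifies the password, creates a server-side session and stores
+// its token in an HttpOnly "sessionID" cookie (marked Secure over HTTPS);
+// Logout removes the session and expires that cookie.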
+ +package controller + +import ( + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" + "net/http" + "time" + + "github.com/tomasen/realip" +) + +func (c *Controller) ShowLoginPage(ctx *core.Context, request *core.Request, response *core.Response) { + if ctx.IsAuthenticated() { + response.Redirect(ctx.GetRoute("unread")) + return + } + + response.Html().Render("login", tplParams{ + "csrf": ctx.GetCsrfToken(), + }) +} + +func (c *Controller) CheckLogin(ctx *core.Context, request *core.Request, response *core.Response) { + authForm := form.NewAuthForm(request.GetRequest()) + tplParams := tplParams{ + "errorMessage": "Invalid username or password.", + "csrf": ctx.GetCsrfToken(), + } + + if err := authForm.Validate(); err != nil { + log.Println(err) + response.Html().Render("login", tplParams) + return + } + + if err := c.store.CheckPassword(authForm.Username, authForm.Password); err != nil { + log.Println(err) + response.Html().Render("login", tplParams) + return + } + + sessionToken, err := c.store.CreateSession( + authForm.Username, + request.GetHeaders().Get("User-Agent"), + realip.RealIP(request.GetRequest()), + ) + if err != nil { + response.Html().ServerError(err) + return + } + + log.Printf("[UI:CheckLogin] username=%s just logged in\n", authForm.Username) + + cookie := &http.Cookie{ + Name: "sessionID", + Value: sessionToken, + Path: "/", + Secure: request.IsHTTPS(), + HttpOnly: true, + } + + response.SetCookie(cookie) + response.Redirect(ctx.GetRoute("unread")) +} + +func (c *Controller) Logout(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + sessionCookie := request.GetCookie("sessionID") + if err := c.store.RemoveSessionByToken(user.ID, sessionCookie); err != nil { + log.Printf("[UI:Logout] %v", err) + } + + cookie := &http.Cookie{ + Name: "sessionID", + Value: "", + Path: "/", + Secure: request.IsHTTPS(), + HttpOnly: true, + MaxAge: -1, + Expires: time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC), + } + + response.SetCookie(cookie) + response.Redirect(ctx.GetRoute("login")) +} diff --git a/server/ui/controller/opml.go b/server/ui/controller/opml.go new file mode 100644 index 00000000..45d34f8e --- /dev/null +++ b/server/ui/controller/opml.go @@ -0,0 +1,63 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
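+// The OPML controller below exports the user's subscriptions as feeds.opml
+// and imports an uploaded OPML file, re-rendering the import page with an
+// error message when the import fails.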
+ +package controller + +import ( + "github.com/miniflux/miniflux2/server/core" + "log" +) + +func (c *Controller) Export(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + opml, err := c.opmlHandler.Export(user.ID) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Xml().Download("feeds.opml", opml) +} + +func (c *Controller) Import(ctx *core.Context, request *core.Request, response *core.Response) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("import", args.Merge(tplParams{ + "menu": "feeds", + })) +} + +func (c *Controller) UploadOPML(ctx *core.Context, request *core.Request, response *core.Response) { + file, fileHeader, err := request.GetFile("file") + if err != nil { + log.Println(err) + response.Redirect(ctx.GetRoute("import")) + return + } + defer file.Close() + + user := ctx.GetLoggedUser() + log.Printf("[UI:UploadOPML] User #%d uploaded this file: %s (%d bytes)\n", user.ID, fileHeader.Filename, fileHeader.Size) + + if impErr := c.opmlHandler.Import(user.ID, file); impErr != nil { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("import", args.Merge(tplParams{ + "errorMessage": impErr.Error(), + "menu": "feeds", + })) + + return + } + + response.Redirect(ctx.GetRoute("feeds")) +} diff --git a/server/ui/controller/pagination.go b/server/ui/controller/pagination.go new file mode 100644 index 00000000..b649d900 --- /dev/null +++ b/server/ui/controller/pagination.go @@ -0,0 +1,46 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +const ( + NbItemsPerPage = 100 +) + +type Pagination struct { + Route string + Total int + Offset int + ItemsPerPage int + ShowNext bool + ShowPrev bool + NextOffset int + PrevOffset int +} + +func (c *Controller) getPagination(route string, total, offset int) Pagination { + nextOffset := 0 + prevOffset := 0 + showNext := (total - offset) > NbItemsPerPage + showPrev := offset > 0 + + if showNext { + nextOffset = offset + NbItemsPerPage + } + + if showPrev { + prevOffset = offset - NbItemsPerPage + } + + return Pagination{ + Route: route, + Total: total, + Offset: offset, + ItemsPerPage: NbItemsPerPage, + ShowNext: showNext, + NextOffset: nextOffset, + ShowPrev: showPrev, + PrevOffset: prevOffset, + } +} diff --git a/server/ui/controller/proxy.go b/server/ui/controller/proxy.go new file mode 100644 index 00000000..8a2f2bfa --- /dev/null +++ b/server/ui/controller/proxy.go @@ -0,0 +1,49 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
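+// ImageProxy below serves images referenced over plain HTTP through the
+// application itself: the target URL arrives base64-encoded in the
+// "encodedURL" route parameter, is fetched with http.Get, and the body is
+// cached for 72 hours with a hash-based ETag. The encoded form is the one
+// produced by image_proxy_filter.go; a sketch using a placeholder URL that
+// is not taken from this commit:
+//
+//	encoded := base64.StdEncoding.EncodeToString([]byte("http://example.org/image.png"))
+//	// the rewritten <img> src then points at the route named "proxy"
+//	// (registered as /proxy/{encodedURL} in the filter tests below)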
+ +package controller + +import ( + "encoding/base64" + "errors" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/server/core" + "io/ioutil" + "log" + "net/http" + "time" +) + +func (c *Controller) ImageProxy(ctx *core.Context, request *core.Request, response *core.Response) { + encodedURL := request.GetStringParam("encodedURL", "") + if encodedURL == "" { + response.Html().BadRequest(errors.New("No URL provided")) + return + } + + decodedURL, err := base64.StdEncoding.DecodeString(encodedURL) + if err != nil { + response.Html().BadRequest(errors.New("Unable to decode this URL")) + return + } + + resp, err := http.Get(string(decodedURL)) + if err != nil { + log.Println(err) + response.Html().NotFound() + return + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + response.Html().NotFound() + return + } + + body, _ := ioutil.ReadAll(resp.Body) + etag := helper.HashFromBytes(body) + contentType := resp.Header.Get("Content-Type") + + response.Cache(contentType, etag, body, 72*time.Hour) +} diff --git a/server/ui/controller/session.go b/server/ui/controller/session.go new file mode 100644 index 00000000..0255728f --- /dev/null +++ b/server/ui/controller/session.go @@ -0,0 +1,49 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "github.com/miniflux/miniflux2/server/core" + "log" +) + +func (c *Controller) ShowSessions(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + sessions, err := c.store.GetSessions(user.ID) + if err != nil { + response.Html().ServerError(err) + return + } + + sessionCookie := request.GetCookie("sessionID") + response.Html().Render("sessions", args.Merge(tplParams{ + "sessions": sessions, + "currentSessionToken": sessionCookie, + "menu": "settings", + })) +} + +func (c *Controller) RemoveSession(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + sessionID, err := request.GetIntegerParam("sessionID") + if err != nil { + response.Html().BadRequest(err) + return + } + + err = c.store.RemoveSessionByID(user.ID, sessionID) + if err != nil { + log.Println("[UI:RemoveSession]", err) + } + + response.Redirect(ctx.GetRoute("sessions")) +} diff --git a/server/ui/controller/settings.go b/server/ui/controller/settings.go new file mode 100644 index 00000000..a7cca789 --- /dev/null +++ b/server/ui/controller/settings.go @@ -0,0 +1,92 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
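+// The settings controller below shows and updates the current user's
+// profile (username, theme, language, timezone), refusing usernames that
+// already belong to another account.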
+ +package controller + +import ( + "github.com/miniflux/miniflux2/locale" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" +) + +func (c *Controller) ShowSettings(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + args, err := c.getSettingsFormTemplateArgs(ctx, user, nil) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("settings", args) +} + +func (c *Controller) UpdateSettings(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + settingsForm := form.NewSettingsForm(request.GetRequest()) + args, err := c.getSettingsFormTemplateArgs(ctx, user, settingsForm) + if err != nil { + response.Html().ServerError(err) + return + } + + if err := settingsForm.Validate(); err != nil { + response.Html().Render("settings", args.Merge(tplParams{ + "form": settingsForm, + "errorMessage": err.Error(), + })) + return + } + + if c.store.AnotherUserExists(user.ID, settingsForm.Username) { + response.Html().Render("settings", args.Merge(tplParams{ + "form": settingsForm, + "errorMessage": "This user already exists.", + })) + return + } + + err = c.store.UpdateUser(settingsForm.Merge(user)) + if err != nil { + log.Println(err) + response.Html().Render("settings", args.Merge(tplParams{ + "form": settingsForm, + "errorMessage": "Unable to update this user.", + })) + return + } + + response.Redirect(ctx.GetRoute("settings")) +} + +func (c *Controller) getSettingsFormTemplateArgs(ctx *core.Context, user *model.User, settingsForm *form.SettingsForm) (tplParams, error) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + return args, err + } + + if settingsForm == nil { + args["form"] = form.SettingsForm{ + Username: user.Username, + Theme: user.Theme, + Language: user.Language, + Timezone: user.Timezone, + } + } else { + args["form"] = settingsForm + } + + args["menu"] = "settings" + args["themes"] = model.GetThemes() + args["languages"] = locale.GetAvailableLanguages() + args["timezones"], err = c.store.GetTimezones() + if err != nil { + return args, err + } + + return args, nil +} diff --git a/server/ui/controller/static.go b/server/ui/controller/static.go new file mode 100644 index 00000000..7b6a1def --- /dev/null +++ b/server/ui/controller/static.go @@ -0,0 +1,41 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
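+// The static controller below serves the embedded assets: the stylesheet is
+// the "common" sheet concatenated with the selected theme, and stylesheets,
+// JavaScript and the favicon are all returned with checksum-based ETags and
+// a 48-hour cache lifetime.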
+ +package controller + +import ( + "encoding/base64" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/static" + "log" + "time" +) + +func (c *Controller) Stylesheet(ctx *core.Context, request *core.Request, response *core.Response) { + stylesheet := request.GetStringParam("name", "white") + body := static.Stylesheets["common"] + etag := static.StylesheetsChecksums["common"] + + if theme, found := static.Stylesheets[stylesheet]; found { + body += theme + etag += static.StylesheetsChecksums[stylesheet] + } + + response.Cache("text/css", etag, []byte(body), 48*time.Hour) +} + +func (c *Controller) Javascript(ctx *core.Context, request *core.Request, response *core.Response) { + response.Cache("text/javascript", static.JavascriptChecksums["app"], []byte(static.Javascript["app"]), 48*time.Hour) +} + +func (c *Controller) Favicon(ctx *core.Context, request *core.Request, response *core.Response) { + blob, err := base64.StdEncoding.DecodeString(static.Binaries["favicon.ico"]) + if err != nil { + log.Println(err) + response.Html().NotFound() + return + } + + response.Cache("image/x-icon", static.BinariesChecksums["favicon.ico"], blob, 48*time.Hour) +} diff --git a/server/ui/controller/subscription.go b/server/ui/controller/subscription.go new file mode 100644 index 00000000..b1557696 --- /dev/null +++ b/server/ui/controller/subscription.go @@ -0,0 +1,127 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/reader/subscription" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" +) + +func (c *Controller) AddSubscription(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + args, err := c.getSubscriptionFormTemplateArgs(ctx, user) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("add_subscription", args) +} + +func (c *Controller) SubmitSubscription(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + args, err := c.getSubscriptionFormTemplateArgs(ctx, user) + if err != nil { + response.Html().ServerError(err) + return + } + + subscriptionForm := form.NewSubscriptionForm(request.GetRequest()) + if err := subscriptionForm.Validate(); err != nil { + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": err.Error(), + })) + return + } + + subscriptions, err := subscription.FindSubscriptions(subscriptionForm.URL) + if err != nil { + log.Println(err) + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": err, + })) + return + } + + log.Println("[UI:SubmitSubscription]", subscriptions) + + n := len(subscriptions) + switch { + case n == 0: + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": "Unable to find any subscription.", + })) + case n == 1: + feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptions[0].URL) + if err != nil { + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": err, + })) + return + } + + response.Redirect(ctx.GetRoute("feedEntries", "feedID", feed.ID)) + case n > 1: + 
response.Html().Render("choose_subscription", args.Merge(tplParams{ + "categoryID": subscriptionForm.CategoryID, + "subscriptions": subscriptions, + })) + } +} + +func (c *Controller) ChooseSubscription(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + args, err := c.getSubscriptionFormTemplateArgs(ctx, user) + if err != nil { + response.Html().ServerError(err) + return + } + + subscriptionForm := form.NewSubscriptionForm(request.GetRequest()) + if err := subscriptionForm.Validate(); err != nil { + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": err.Error(), + })) + return + } + + feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptionForm.URL) + if err != nil { + response.Html().Render("add_subscription", args.Merge(tplParams{ + "form": subscriptionForm, + "errorMessage": err, + })) + return + } + + response.Redirect(ctx.GetRoute("feedEntries", "feedID", feed.ID)) +} + +func (c *Controller) getSubscriptionFormTemplateArgs(ctx *core.Context, user *model.User) (tplParams, error) { + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + return nil, err + } + + categories, err := c.store.GetCategories(user.ID) + if err != nil { + return nil, err + } + + args["categories"] = categories + args["menu"] = "feeds" + return args, nil +} diff --git a/server/ui/controller/unread.go b/server/ui/controller/unread.go new file mode 100644 index 00000000..63d7db02 --- /dev/null +++ b/server/ui/controller/unread.go @@ -0,0 +1,43 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package controller + +import ( + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" +) + +func (c *Controller) ShowUnreadPage(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + offset := request.GetQueryIntegerParam("offset", 0) + + builder := c.store.GetEntryQueryBuilder(user.ID, user.Timezone) + builder.WithStatus(model.EntryStatusUnread) + builder.WithOrder(model.DefaultSortingOrder) + builder.WithDirection(model.DefaultSortingDirection) + builder.WithOffset(offset) + builder.WithLimit(NbItemsPerPage) + + entries, err := builder.GetEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + countUnread, err := builder.CountEntries() + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("unread", tplParams{ + "user": user, + "countUnread": countUnread, + "entries": entries, + "pagination": c.getPagination(ctx.GetRoute("unread"), countUnread, offset), + "menu": "unread", + "csrf": ctx.GetCsrfToken(), + }) +} diff --git a/server/ui/controller/user.go b/server/ui/controller/user.go new file mode 100644 index 00000000..c69b0f8d --- /dev/null +++ b/server/ui/controller/user.go @@ -0,0 +1,231 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
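+// The user controller below is restricted to administrators: it lists users
+// and implements the create, edit, update and remove flows, re-rendering the
+// form with an error message when validation fails or the username is
+// already taken.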
+ +package controller + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "github.com/miniflux/miniflux2/server/core" + "github.com/miniflux/miniflux2/server/ui/form" + "log" +) + +func (c *Controller) ShowUsers(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + users, err := c.store.GetUsers() + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("users", args.Merge(tplParams{ + "users": users, + "menu": "settings", + })) +} + +func (c *Controller) CreateUser(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + response.Html().Render("create_user", args.Merge(tplParams{ + "menu": "settings", + "form": &form.UserForm{}, + })) +} + +func (c *Controller) SaveUser(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + userForm := form.NewUserForm(request.GetRequest()) + if err := userForm.ValidateCreation(); err != nil { + response.Html().Render("create_user", args.Merge(tplParams{ + "menu": "settings", + "form": userForm, + "errorMessage": err.Error(), + })) + return + } + + if c.store.UserExists(userForm.Username) { + response.Html().Render("create_user", args.Merge(tplParams{ + "menu": "settings", + "form": userForm, + "errorMessage": "This user already exists.", + })) + return + } + + newUser := userForm.ToUser() + if err := c.store.CreateUser(newUser); err != nil { + log.Println(err) + response.Html().Render("edit_user", args.Merge(tplParams{ + "menu": "settings", + "form": userForm, + "errorMessage": "Unable to create this user.", + })) + return + } + + response.Redirect(ctx.GetRoute("users")) +} + +func (c *Controller) EditUser(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + selectedUser, err := c.getUserFromURL(ctx, request, response) + if err != nil { + return + } + + response.Html().Render("edit_user", args.Merge(tplParams{ + "menu": "settings", + "selected_user": selectedUser, + "form": &form.UserForm{ + Username: selectedUser.Username, + IsAdmin: selectedUser.IsAdmin, + }, + })) +} + +func (c *Controller) UpdateUser(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + args, err := c.getCommonTemplateArgs(ctx) + if err != nil { + response.Html().ServerError(err) + return + } + + selectedUser, err := c.getUserFromURL(ctx, request, response) + if err != nil { + return + } + + userForm := form.NewUserForm(request.GetRequest()) + if err := userForm.ValidateModification(); err != nil { + response.Html().Render("edit_user", args.Merge(tplParams{ + "menu": "settings", + "selected_user": selectedUser, + "form": userForm, + 
"errorMessage": err.Error(), + })) + return + } + + if c.store.AnotherUserExists(selectedUser.ID, userForm.Username) { + response.Html().Render("edit_user", args.Merge(tplParams{ + "menu": "settings", + "selected_user": selectedUser, + "form": userForm, + "errorMessage": "This user already exists.", + })) + return + } + + userForm.Merge(selectedUser) + if err := c.store.UpdateUser(selectedUser); err != nil { + log.Println(err) + response.Html().Render("edit_user", args.Merge(tplParams{ + "menu": "settings", + "selected_user": selectedUser, + "form": userForm, + "errorMessage": "Unable to update this user.", + })) + return + } + + response.Redirect(ctx.GetRoute("users")) +} + +func (c *Controller) RemoveUser(ctx *core.Context, request *core.Request, response *core.Response) { + user := ctx.GetLoggedUser() + if !user.IsAdmin { + response.Html().Forbidden() + return + } + + selectedUser, err := c.getUserFromURL(ctx, request, response) + if err != nil { + return + } + + if err := c.store.RemoveUser(selectedUser.ID); err != nil { + response.Html().ServerError(err) + return + } + + response.Redirect(ctx.GetRoute("users")) +} + +func (c *Controller) getUserFromURL(ctx *core.Context, request *core.Request, response *core.Response) (*model.User, error) { + userID, err := request.GetIntegerParam("userID") + if err != nil { + response.Html().BadRequest(err) + return nil, err + } + + user, err := c.store.GetUserById(userID) + if err != nil { + response.Html().ServerError(err) + return nil, err + } + + if user == nil { + response.Html().NotFound() + return nil, errors.New("User not found") + } + + return user, nil +} diff --git a/server/ui/filter/image_proxy_filter.go b/server/ui/filter/image_proxy_filter.go new file mode 100644 index 00000000..71da8691 --- /dev/null +++ b/server/ui/filter/image_proxy_filter.go @@ -0,0 +1,35 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package filter + +import ( + "encoding/base64" + "github.com/miniflux/miniflux2/reader/url" + "github.com/miniflux/miniflux2/server/route" + "strings" + + "github.com/PuerkitoBio/goquery" + "github.com/gorilla/mux" +) + +// ImageProxyFilter rewrites image tag URLs without HTTPS to local proxy URL +func ImageProxyFilter(r *mux.Router, data string) string { + doc, err := goquery.NewDocumentFromReader(strings.NewReader(data)) + if err != nil { + return data + } + + doc.Find("img").Each(func(i int, img *goquery.Selection) { + if srcAttr, ok := img.Attr("src"); ok { + if !url.IsHTTPS(srcAttr) { + path := route.GetRoute(r, "proxy", "encodedURL", base64.StdEncoding.EncodeToString([]byte(srcAttr))) + img.SetAttr("src", path) + } + } + }) + + output, _ := doc.Find("body").First().Html() + return output +} diff --git a/server/ui/filter/image_proxy_filter_test.go b/server/ui/filter/image_proxy_filter_test.go new file mode 100644 index 00000000..992516eb --- /dev/null +++ b/server/ui/filter/image_proxy_filter_test.go @@ -0,0 +1,38 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package filter + +import ( + "net/http" + "testing" + + "github.com/gorilla/mux" +) + +func TestProxyFilterWithHttp(t *testing.T) { + r := mux.NewRouter() + r.HandleFunc("/proxy/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy") + + input := `

Test

` + output := ImageProxyFilter(r, input) + expected := `

Test

` + + if expected != output { + t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) + } +} + +func TestProxyFilterWithHttps(t *testing.T) { + r := mux.NewRouter() + r.HandleFunc("/proxy/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy") + + input := `

Test

` + output := ImageProxyFilter(r, input) + expected := `

Test

` + + if expected != output { + t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) + } +} diff --git a/server/ui/form/auth.go b/server/ui/form/auth.go new file mode 100644 index 00000000..3cfc2171 --- /dev/null +++ b/server/ui/form/auth.go @@ -0,0 +1,30 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package form + +import ( + "errors" + "net/http" +) + +type AuthForm struct { + Username string + Password string +} + +func (a AuthForm) Validate() error { + if a.Username == "" || a.Password == "" { + return errors.New("All fields are mandatory.") + } + + return nil +} + +func NewAuthForm(r *http.Request) *AuthForm { + return &AuthForm{ + Username: r.FormValue("username"), + Password: r.FormValue("password"), + } +} diff --git a/server/ui/form/category.go b/server/ui/form/category.go new file mode 100644 index 00000000..510d1b43 --- /dev/null +++ b/server/ui/form/category.go @@ -0,0 +1,34 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package form + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "net/http" +) + +// CategoryForm represents a feed form in the UI +type CategoryForm struct { + Title string +} + +func (c CategoryForm) Validate() error { + if c.Title == "" { + return errors.New("The title is mandatory.") + } + return nil +} + +func (c CategoryForm) Merge(category *model.Category) *model.Category { + category.Title = c.Title + return category +} + +func NewCategoryForm(r *http.Request) *CategoryForm { + return &CategoryForm{ + Title: r.FormValue("title"), + } +} diff --git a/server/ui/form/feed.go b/server/ui/form/feed.go new file mode 100644 index 00000000..e21e6ca0 --- /dev/null +++ b/server/ui/form/feed.go @@ -0,0 +1,53 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package form + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "net/http" + "strconv" +) + +// FeedForm represents a feed form in the UI +type FeedForm struct { + FeedURL string + SiteURL string + Title string + CategoryID int64 +} + +// ValidateModification validates FeedForm fields +func (f FeedForm) ValidateModification() error { + if f.FeedURL == "" || f.SiteURL == "" || f.Title == "" || f.CategoryID == 0 { + return errors.New("All fields are mandatory.") + } + return nil +} + +func (f FeedForm) Merge(feed *model.Feed) *model.Feed { + feed.Category.ID = f.CategoryID + feed.Title = f.Title + feed.SiteURL = f.SiteURL + feed.FeedURL = f.FeedURL + feed.ParsingErrorCount = 0 + feed.ParsingErrorMsg = "" + return feed +} + +// NewFeedForm parses the HTTP request and returns a FeedForm +func NewFeedForm(r *http.Request) *FeedForm { + categoryID, err := strconv.Atoi(r.FormValue("category_id")) + if err != nil { + categoryID = 0 + } + + return &FeedForm{ + FeedURL: r.FormValue("feed_url"), + SiteURL: r.FormValue("site_url"), + Title: r.FormValue("title"), + CategoryID: int64(categoryID), + } +} diff --git a/server/ui/form/settings.go b/server/ui/form/settings.go new file mode 100644 index 00000000..1e40b97d --- /dev/null +++ b/server/ui/form/settings.go @@ -0,0 +1,62 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. 
+// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package form + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "net/http" +) + +type SettingsForm struct { + Username string + Password string + Confirmation string + Theme string + Language string + Timezone string +} + +func (s *SettingsForm) Merge(user *model.User) *model.User { + user.Username = s.Username + user.Theme = s.Theme + user.Language = s.Language + user.Timezone = s.Timezone + + if s.Password != "" { + user.Password = s.Password + } + + return user +} + +func (s *SettingsForm) Validate() error { + if s.Username == "" || s.Theme == "" || s.Language == "" || s.Timezone == "" { + return errors.New("The username, theme, language and timezone fields are mandatory.") + } + + if s.Password != "" { + if s.Password != s.Confirmation { + return errors.New("Passwords are not the same.") + } + + if len(s.Password) < 6 { + return errors.New("You must use at least 6 characters") + } + } + + return nil +} + +func NewSettingsForm(r *http.Request) *SettingsForm { + return &SettingsForm{ + Username: r.FormValue("username"), + Password: r.FormValue("password"), + Confirmation: r.FormValue("confirmation"), + Theme: r.FormValue("theme"), + Language: r.FormValue("language"), + Timezone: r.FormValue("timezone"), + } +} diff --git a/server/ui/form/subscription.go b/server/ui/form/subscription.go new file mode 100644 index 00000000..6696b22f --- /dev/null +++ b/server/ui/form/subscription.go @@ -0,0 +1,36 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package form + +import ( + "errors" + "net/http" + "strconv" +) + +type SubscriptionForm struct { + URL string + CategoryID int64 +} + +func (s *SubscriptionForm) Validate() error { + if s.URL == "" || s.CategoryID == 0 { + return errors.New("The URL and the category are mandatory.") + } + + return nil +} + +func NewSubscriptionForm(r *http.Request) *SubscriptionForm { + categoryID, err := strconv.Atoi(r.FormValue("category_id")) + if err != nil { + categoryID = 0 + } + + return &SubscriptionForm{ + URL: r.FormValue("url"), + CategoryID: int64(categoryID), + } +} diff --git a/server/ui/form/user.go b/server/ui/form/user.go new file mode 100644 index 00000000..1197b484 --- /dev/null +++ b/server/ui/form/user.go @@ -0,0 +1,80 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
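
The form helpers above all follow the same parse, validate, merge pattern. Below is a minimal sketch of that flow for `SettingsForm`, built around a simulated request from `net/http/httptest`; this snippet is not part of the commit and the field values are placeholders.

```go
package main

import (
	"fmt"
	"net/http/httptest"
	"net/url"
	"strings"

	"github.com/miniflux/miniflux2/model"
	"github.com/miniflux/miniflux2/server/ui/form"
)

func main() {
	// Simulated form submission; the field names match those read by NewSettingsForm.
	values := url.Values{}
	values.Set("username", "demo")
	values.Set("theme", "default")
	values.Set("language", "en_US")
	values.Set("timezone", "UTC")

	r := httptest.NewRequest("POST", "/settings", strings.NewReader(values.Encode()))
	r.Header.Set("Content-Type", "application/x-www-form-urlencoded")

	settingsForm := form.NewSettingsForm(r)
	if err := settingsForm.Validate(); err != nil {
		fmt.Println("invalid form:", err)
		return
	}

	// Merge only overwrites the password when a new one was submitted.
	user := &model.User{ID: 1, Username: "demo"}
	settingsForm.Merge(user)
	fmt.Println(user.Username, user.Theme, user.Language, user.Timezone)
}
```
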
+ +package form + +import ( + "errors" + "github.com/miniflux/miniflux2/model" + "net/http" +) + +type UserForm struct { + Username string + Password string + Confirmation string + IsAdmin bool +} + +func (u UserForm) ValidateCreation() error { + if u.Username == "" || u.Password == "" || u.Confirmation == "" { + return errors.New("All fields are mandatory.") + } + + if u.Password != u.Confirmation { + return errors.New("Passwords are not the same.") + } + + if len(u.Password) < 6 { + return errors.New("You must use at least 6 characters.") + } + + return nil +} + +func (u UserForm) ValidateModification() error { + if u.Username == "" { + return errors.New("The username is mandatory.") + } + + if u.Password != "" { + if u.Password != u.Confirmation { + return errors.New("Passwords are not the same.") + } + + if len(u.Password) < 6 { + return errors.New("You must use at least 6 characters.") + } + } + + return nil +} + +func (u UserForm) ToUser() *model.User { + return &model.User{ + Username: u.Username, + Password: u.Password, + IsAdmin: u.IsAdmin, + } +} + +func (u UserForm) Merge(user *model.User) *model.User { + user.Username = u.Username + user.IsAdmin = u.IsAdmin + + if u.Password != "" { + user.Password = u.Password + } + + return user +} + +func NewUserForm(r *http.Request) *UserForm { + return &UserForm{ + Username: r.FormValue("username"), + Password: r.FormValue("password"), + Confirmation: r.FormValue("confirmation"), + IsAdmin: r.FormValue("is_admin") == "1", + } +} diff --git a/server/ui/payload/payload.go b/server/ui/payload/payload.go new file mode 100644 index 00000000..b2fef954 --- /dev/null +++ b/server/ui/payload/payload.go @@ -0,0 +1,31 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
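
The payload package introduced next accepts a small JSON body; a standalone sketch of the contract expected by `DecodeEntryStatusPayload` (not code from the commit, the entry IDs are made up):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/miniflux/miniflux2/server/ui/payload"
)

func main() {
	// {"entry_ids": [...], "status": "..."} where status must pass model.ValidateEntryStatus.
	body := strings.NewReader(`{"entry_ids": [12, 42], "status": "read"}`)

	entryIDs, status, err := payload.DecodeEntryStatusPayload(body)
	if err != nil {
		fmt.Println("invalid payload:", err)
		return
	}
	fmt.Println(entryIDs, status) // [12 42] read
}
```
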
+ +package payload + +import ( + "encoding/json" + "fmt" + "github.com/miniflux/miniflux2/model" + "io" +) + +func DecodeEntryStatusPayload(data io.Reader) (entryIDs []int64, status string, err error) { + type payload struct { + EntryIDs []int64 `json:"entry_ids"` + Status string `json:"status"` + } + + var p payload + decoder := json.NewDecoder(data) + if err = decoder.Decode(&p); err != nil { + return nil, "", fmt.Errorf("invalid JSON payload: %v", err) + } + + if err := model.ValidateEntryStatus(p.Status); err != nil { + return nil, "", err + } + + return p.EntryIDs, p.Status, nil +} diff --git a/sql/schema_version_1.sql b/sql/schema_version_1.sql new file mode 100644 index 00000000..e32a38c7 --- /dev/null +++ b/sql/schema_version_1.sql @@ -0,0 +1,103 @@ +create table schema_version ( + version text not null +); + +create table users ( + id serial not null, + username text not null unique, + password text, + is_admin bool default 'f', + language text default 'en_US', + timezone text default 'UTC', + theme text default 'default', + last_login_at timestamp with time zone, + primary key (id) +); + +create table sessions ( + id serial not null, + user_id int not null, + token text not null unique, + created_at timestamp with time zone default now(), + user_agent text, + ip text, + primary key (id), + unique (user_id, token), + foreign key (user_id) references users(id) on delete cascade +); + +create table categories ( + id serial not null, + user_id int not null, + title text not null, + primary key (id), + unique (user_id, title), + foreign key (user_id) references users(id) on delete cascade +); + +create table feeds ( + id bigserial not null, + user_id int not null, + category_id int not null, + title text not null, + feed_url text not null, + site_url text not null, + checked_at timestamp with time zone default now(), + etag_header text, + last_modified_header text, + parsing_error_msg text default '', + parsing_error_count int default 0, + primary key (id), + unique (user_id, feed_url), + foreign key (user_id) references users(id) on delete cascade, + foreign key (category_id) references categories(id) on delete cascade +); + +create type entry_status as enum ('unread', 'read', 'removed'); + +create table entries ( + id bigserial not null, + user_id int not null, + feed_id bigint not null, + hash text not null, + published_at timestamp with time zone not null, + title text not null, + url text not null, + author text, + content text, + status entry_status default 'unread', + primary key (id), + unique (feed_id, hash), + foreign key (user_id) references users(id) on delete cascade, + foreign key (feed_id) references feeds(id) on delete cascade +); + +create index entries_feed_idx on entries using btree(feed_id); + +create table enclosures ( + id bigserial not null, + user_id int not null, + entry_id bigint not null, + url text not null, + size int default 0, + mime_type text default '', + primary key (id), + foreign key (user_id) references users(id) on delete cascade, + foreign key (entry_id) references entries(id) on delete cascade +); + +create table icons ( + id bigserial not null, + hash text not null unique, + mime_type text not null, + content bytea not null, + primary key (id) +); + +create table feed_icons ( + feed_id bigint not null, + icon_id bigint not null, + primary key(feed_id, icon_id), + foreign key (feed_id) references feeds(id) on delete cascade, + foreign key (icon_id) references icons(id) on delete cascade +); diff --git a/sql/sql.go b/sql/sql.go new file mode 
100644 index 00000000..6998b1b7 --- /dev/null +++ b/sql/sql.go @@ -0,0 +1,115 @@ +// Code generated by go generate; DO NOT EDIT. +// 2017-11-19 22:01:21.921648993 -0800 PST m=+0.002482136 + +package sql + +var SqlMap = map[string]string{ + "schema_version_1": `create table schema_version ( + version text not null +); + +create table users ( + id serial not null, + username text not null unique, + password text, + is_admin bool default 'f', + language text default 'en_US', + timezone text default 'UTC', + theme text default 'default', + last_login_at timestamp with time zone, + primary key (id) +); + +create table sessions ( + id serial not null, + user_id int not null, + token text not null unique, + created_at timestamp with time zone default now(), + user_agent text, + ip text, + primary key (id), + unique (user_id, token), + foreign key (user_id) references users(id) on delete cascade +); + +create table categories ( + id serial not null, + user_id int not null, + title text not null, + primary key (id), + unique (user_id, title), + foreign key (user_id) references users(id) on delete cascade +); + +create table feeds ( + id bigserial not null, + user_id int not null, + category_id int not null, + title text not null, + feed_url text not null, + site_url text not null, + checked_at timestamp with time zone default now(), + etag_header text, + last_modified_header text, + parsing_error_msg text default '', + parsing_error_count int default 0, + primary key (id), + unique (user_id, feed_url), + foreign key (user_id) references users(id) on delete cascade, + foreign key (category_id) references categories(id) on delete cascade +); + +create type entry_status as enum ('unread', 'read', 'removed'); + +create table entries ( + id bigserial not null, + user_id int not null, + feed_id bigint not null, + hash text not null, + published_at timestamp with time zone not null, + title text not null, + url text not null, + author text, + content text, + status entry_status default 'unread', + primary key (id), + unique (feed_id, hash), + foreign key (user_id) references users(id) on delete cascade, + foreign key (feed_id) references feeds(id) on delete cascade +); + +create index entries_feed_idx on entries using btree(feed_id); + +create table enclosures ( + id bigserial not null, + user_id int not null, + entry_id bigint not null, + url text not null, + size int default 0, + mime_type text default '', + primary key (id), + foreign key (user_id) references users(id) on delete cascade, + foreign key (entry_id) references entries(id) on delete cascade +); + +create table icons ( + id bigserial not null, + hash text not null unique, + mime_type text not null, + content bytea not null, + primary key (id) +); + +create table feed_icons ( + feed_id bigint not null, + icon_id bigint not null, + primary key(feed_id, icon_id), + foreign key (feed_id) references feeds(id) on delete cascade, + foreign key (icon_id) references icons(id) on delete cascade +); +`, +} + +var SqlMapChecksums = map[string]string{ + "schema_version_1": "cb85ca7dd97a6e1348e00b65ea004253a7165bed9a772746613276e47ef93213", +} diff --git a/storage/category.go b/storage/category.go new file mode 100644 index 00000000..3d08c4da --- /dev/null +++ b/storage/category.go @@ -0,0 +1,178 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
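
The generated `SqlMapChecksums` digests are 64 hex characters, which suggests SHA-256. Assuming that is the algorithm used by the generator, a checksum could be re-verified against the embedded schema text like this (illustrative sketch, not part of the commit):

```go
package main

import (
	"crypto/sha256"
	"fmt"

	"github.com/miniflux/miniflux2/sql"
)

func main() {
	schema := sql.SqlMap["schema_version_1"]

	// Assumption: the checksum is the SHA-256 digest of the schema string.
	sum := fmt.Sprintf("%x", sha256.Sum256([]byte(schema)))
	fmt.Println("matches:", sum == sql.SqlMapChecksums["schema_version_1"])
}
```
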
+ +package storage + +import ( + "database/sql" + "errors" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "time" +) + +func (s *Storage) CategoryExists(userID, categoryID int64) bool { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:CategoryExists] userID=%d, categoryID=%d", userID, categoryID)) + + var result int + query := `SELECT count(*) as c FROM categories WHERE user_id=$1 AND id=$2` + s.db.QueryRow(query, userID, categoryID).Scan(&result) + return result >= 1 +} + +func (s *Storage) GetCategory(userID, categoryID int64) (*model.Category, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetCategory] userID=%d, getCategory=%d", userID, categoryID)) + var category model.Category + + query := `SELECT id, user_id, title FROM categories WHERE user_id=$1 AND id=$2` + err := s.db.QueryRow(query, userID, categoryID).Scan(&category.ID, &category.UserID, &category.Title) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("Unable to fetch category: %v", err) + } + + return &category, nil +} + +func (s *Storage) GetFirstCategory(userID int64) (*model.Category, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetFirstCategory] userID=%d", userID)) + var category model.Category + + query := `SELECT id, user_id, title FROM categories WHERE user_id=$1 ORDER BY title ASC` + err := s.db.QueryRow(query, userID).Scan(&category.ID, &category.UserID, &category.Title) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("Unable to fetch category: %v", err) + } + + return &category, nil +} + +func (s *Storage) GetCategoryByTitle(userID int64, title string) (*model.Category, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetCategoryByTitle] userID=%d, title=%s", userID, title)) + var category model.Category + + query := `SELECT id, user_id, title FROM categories WHERE user_id=$1 AND title=$2` + err := s.db.QueryRow(query, userID, title).Scan(&category.ID, &category.UserID, &category.Title) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("Unable to fetch category: %v", err) + } + + return &category, nil +} + +func (s *Storage) GetCategories(userID int64) (model.Categories, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetCategories] userID=%d", userID)) + + query := `SELECT id, user_id, title FROM categories WHERE user_id=$1` + rows, err := s.db.Query(query, userID) + if err != nil { + return nil, fmt.Errorf("Unable to fetch categories: %v", err) + } + defer rows.Close() + + categories := make(model.Categories, 0) + for rows.Next() { + var category model.Category + if err := rows.Scan(&category.ID, &category.UserID, &category.Title); err != nil { + return nil, fmt.Errorf("Unable to fetch categories row: %v", err) + } + + categories = append(categories, &category) + } + + return categories, nil +} + +func (s *Storage) GetCategoriesWithFeedCount(userID int64) (model.Categories, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetCategoriesWithFeedCount] userID=%d", userID)) + query := `SELECT + c.id, c.user_id, c.title, + (SELECT count(*) FROM feeds WHERE feeds.category_id=c.id) AS count + FROM categories c WHERE user_id=$1` + + rows, err := s.db.Query(query, userID) + if err != nil { + return nil, fmt.Errorf("Unable to fetch categories: %v", err) + } + defer rows.Close() + + categories := 
make(model.Categories, 0) + for rows.Next() { + var category model.Category + if err := rows.Scan(&category.ID, &category.UserID, &category.Title, &category.FeedCount); err != nil { + return nil, fmt.Errorf("Unable to fetch categories row: %v", err) + } + + categories = append(categories, &category) + } + + return categories, nil +} + +func (s *Storage) CreateCategory(category *model.Category) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:CreateCategory] title=%s", category.Title)) + + query := ` + INSERT INTO categories + (user_id, title) + VALUES + ($1, $2) + RETURNING id + ` + err := s.db.QueryRow( + query, + category.UserID, + category.Title, + ).Scan(&category.ID) + + if err != nil { + return fmt.Errorf("Unable to create category: %v", err) + } + + return nil +} + +func (s *Storage) UpdateCategory(category *model.Category) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:UpdateCategory] categoryID=%d", category.ID)) + + query := `UPDATE categories SET title=$1 WHERE id=$2 AND user_id=$3` + _, err := s.db.Exec( + query, + category.Title, + category.ID, + category.UserID, + ) + + if err != nil { + return fmt.Errorf("Unable to update category: %v", err) + } + + return nil +} + +func (s *Storage) RemoveCategory(userID, categoryID int64) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:RemoveCategory] userID=%d, categoryID=%d", userID, categoryID)) + + result, err := s.db.Exec("DELETE FROM categories WHERE id = $1 AND user_id = $2", categoryID, userID) + if err != nil { + return fmt.Errorf("Unable to remove this category: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("Unable to remove this category: %v", err) + } + + if count == 0 { + return errors.New("no category has been removed") + } + + return nil +} diff --git a/storage/enclosure.go b/storage/enclosure.go new file mode 100644 index 00000000..ac85cb74 --- /dev/null +++ b/storage/enclosure.go @@ -0,0 +1,68 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
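
As a usage sketch for the category store above (the package name, function name, and sample title are illustrative, not from the commit):

```go
package sketch

import (
	"fmt"

	"github.com/miniflux/miniflux2/model"
	"github.com/miniflux/miniflux2/storage"
)

func createNewsCategory(store *storage.Storage, userID int64) error {
	category := &model.Category{UserID: userID, Title: "News"}
	if err := store.CreateCategory(category); err != nil {
		return err
	}

	// GetCategoryByTitle returns (nil, nil) when no row matches.
	found, err := store.GetCategoryByTitle(userID, "News")
	if err != nil || found == nil {
		return err
	}

	fmt.Printf("category #%d: %s\n", found.ID, found.Title)
	return nil
}
```
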
+ +package storage + +import ( + "fmt" + "github.com/miniflux/miniflux2/model" +) + +func (s *Storage) GetEnclosures(entryID int64) (model.EnclosureList, error) { + query := `SELECT + id, user_id, entry_id, url, size, mime_type + FROM enclosures + WHERE entry_id = $1 ORDER BY id ASC` + + rows, err := s.db.Query(query, entryID) + if err != nil { + return nil, fmt.Errorf("Unable to get enclosures: %v", err) + } + defer rows.Close() + + enclosures := make(model.EnclosureList, 0) + for rows.Next() { + var enclosure model.Enclosure + err := rows.Scan( + &enclosure.ID, + &enclosure.UserID, + &enclosure.EntryID, + &enclosure.URL, + &enclosure.Size, + &enclosure.MimeType, + ) + + if err != nil { + return nil, fmt.Errorf("Unable to fetch enclosure row: %v", err) + } + + enclosures = append(enclosures, &enclosure) + } + + return enclosures, nil +} + +func (s *Storage) CreateEnclosure(enclosure *model.Enclosure) error { + query := ` + INSERT INTO enclosures + (url, size, mime_type, entry_id, user_id) + VALUES + ($1, $2, $3, $4, $5) + RETURNING id + ` + err := s.db.QueryRow( + query, + enclosure.URL, + enclosure.Size, + enclosure.MimeType, + enclosure.EntryID, + enclosure.UserID, + ).Scan(&enclosure.ID) + + if err != nil { + return fmt.Errorf("Unable to create enclosure: %v", err) + } + + return nil +} diff --git a/storage/entry.go b/storage/entry.go new file mode 100644 index 00000000..84cfb0f2 --- /dev/null +++ b/storage/entry.go @@ -0,0 +1,124 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package storage + +import ( + "errors" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "time" + + "github.com/lib/pq" +) + +func (s *Storage) GetEntryQueryBuilder(userID int64, timezone string) *EntryQueryBuilder { + return NewEntryQueryBuilder(s, userID, timezone) +} + +func (s *Storage) CreateEntry(entry *model.Entry) error { + query := ` + INSERT INTO entries + (title, hash, url, published_at, content, author, user_id, feed_id) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING id + ` + err := s.db.QueryRow( + query, + entry.Title, + entry.Hash, + entry.URL, + entry.Date, + entry.Content, + entry.Author, + entry.UserID, + entry.FeedID, + ).Scan(&entry.ID) + + if err != nil { + return fmt.Errorf("Unable to create entry: %v", err) + } + + entry.Status = "unread" + for i := 0; i < len(entry.Enclosures); i++ { + entry.Enclosures[i].EntryID = entry.ID + entry.Enclosures[i].UserID = entry.UserID + err := s.CreateEnclosure(entry.Enclosures[i]) + if err != nil { + return err + } + } + + return nil +} + +func (s *Storage) UpdateEntry(entry *model.Entry) error { + query := ` + UPDATE entries SET + title=$1, url=$2, published_at=$3, content=$4, author=$5 + WHERE user_id=$6 AND feed_id=$7 AND hash=$8 + ` + _, err := s.db.Exec( + query, + entry.Title, + entry.URL, + entry.Date, + entry.Content, + entry.Author, + entry.UserID, + entry.FeedID, + entry.Hash, + ) + + return err +} + +func (s *Storage) EntryExists(entry *model.Entry) bool { + var result int + query := `SELECT count(*) as c FROM entries WHERE user_id=$1 AND feed_id=$2 AND hash=$3` + s.db.QueryRow(query, entry.UserID, entry.FeedID, entry.Hash).Scan(&result) + return result >= 1 +} + +func (s *Storage) UpdateEntries(userID, feedID int64, entries model.Entries) (err error) { + for _, entry := range entries { + entry.UserID = userID + entry.FeedID = feedID + + if s.EntryExists(entry) { + err = 
s.UpdateEntry(entry) + } else { + err = s.CreateEntry(entry) + } + + if err != nil { + return err + } + } + + return nil +} + +func (s *Storage) SetEntriesStatus(userID int64, entryIDs []int64, status string) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:SetEntriesStatus] userID=%d, entryIDs=%v, status=%s", userID, entryIDs, status)) + + query := `UPDATE entries SET status=$1 WHERE user_id=$2 AND id=ANY($3)` + result, err := s.db.Exec(query, status, userID, pq.Array(entryIDs)) + if err != nil { + return fmt.Errorf("Unable to update entry status: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("Unable to update this entry: %v", err) + } + + if count == 0 { + return errors.New("Nothing has been updated") + } + + return nil +} diff --git a/storage/entry_query_builder.go b/storage/entry_query_builder.go new file mode 100644 index 00000000..0c210c30 --- /dev/null +++ b/storage/entry_query_builder.go @@ -0,0 +1,268 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package storage + +import ( + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "strings" + "time" +) + +type EntryQueryBuilder struct { + store *Storage + feedID int64 + userID int64 + timezone string + categoryID int64 + status string + order string + direction string + limit int + offset int + entryID int64 + gtEntryID int64 + ltEntryID int64 + conditions []string + args []interface{} +} + +func (e *EntryQueryBuilder) WithCondition(column, operator string, value interface{}) *EntryQueryBuilder { + e.args = append(e.args, value) + e.conditions = append(e.conditions, fmt.Sprintf("%s %s $%d", column, operator, len(e.args)+1)) + return e +} + +func (e *EntryQueryBuilder) WithEntryID(entryID int64) *EntryQueryBuilder { + e.entryID = entryID + return e +} + +func (e *EntryQueryBuilder) WithEntryIDGreaterThan(entryID int64) *EntryQueryBuilder { + e.gtEntryID = entryID + return e +} + +func (e *EntryQueryBuilder) WithEntryIDLowerThan(entryID int64) *EntryQueryBuilder { + e.ltEntryID = entryID + return e +} + +func (e *EntryQueryBuilder) WithFeedID(feedID int64) *EntryQueryBuilder { + e.feedID = feedID + return e +} + +func (e *EntryQueryBuilder) WithCategoryID(categoryID int64) *EntryQueryBuilder { + e.categoryID = categoryID + return e +} + +func (e *EntryQueryBuilder) WithStatus(status string) *EntryQueryBuilder { + e.status = status + return e +} + +func (e *EntryQueryBuilder) WithOrder(order string) *EntryQueryBuilder { + e.order = order + return e +} + +func (e *EntryQueryBuilder) WithDirection(direction string) *EntryQueryBuilder { + e.direction = direction + return e +} + +func (e *EntryQueryBuilder) WithLimit(limit int) *EntryQueryBuilder { + e.limit = limit + return e +} + +func (e *EntryQueryBuilder) WithOffset(offset int) *EntryQueryBuilder { + e.offset = offset + return e +} + +func (e *EntryQueryBuilder) CountEntries() (count int, err error) { + defer helper.ExecutionTime( + time.Now(), + fmt.Sprintf("[EntryQueryBuilder:CountEntries] userID=%d, feedID=%d, status=%s", e.userID, e.feedID, e.status), + ) + + query := `SELECT count(*) FROM entries e LEFT JOIN feeds f ON f.id=e.feed_id WHERE %s` + args, condition := e.buildCondition() + err = e.store.db.QueryRow(fmt.Sprintf(query, condition), args...).Scan(&count) + if err != nil { + return 0, fmt.Errorf("unable to count entries: %v", err) + } + + return 
count, nil +} + +func (e *EntryQueryBuilder) GetEntry() (*model.Entry, error) { + e.limit = 1 + entries, err := e.GetEntries() + if err != nil { + return nil, err + } + + if len(entries) != 1 { + return nil, nil + } + + entries[0].Enclosures, err = e.store.GetEnclosures(entries[0].ID) + if err != nil { + return nil, err + } + + return entries[0], nil +} + +func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) { + debugStr := "[EntryQueryBuilder:GetEntries] userID=%d, feedID=%d, categoryID=%d, status=%s, order=%s, direction=%s, offset=%d, limit=%d" + defer helper.ExecutionTime(time.Now(), fmt.Sprintf(debugStr, e.userID, e.feedID, e.categoryID, e.status, e.order, e.direction, e.offset, e.limit)) + + query := ` + SELECT + e.id, e.user_id, e.feed_id, e.hash, e.published_at at time zone '%s', e.title, e.url, e.author, e.content, e.status, + f.title as feed_title, f.feed_url, f.site_url, f.checked_at, + f.category_id, c.title as category_title, + fi.icon_id + FROM entries e + LEFT JOIN feeds f ON f.id=e.feed_id + LEFT JOIN categories c ON c.id=f.category_id + LEFT JOIN feed_icons fi ON fi.feed_id=f.id + WHERE %s %s + ` + + args, conditions := e.buildCondition() + query = fmt.Sprintf(query, e.timezone, conditions, e.buildSorting()) + // log.Println(query) + + rows, err := e.store.db.Query(query, args...) + if err != nil { + return nil, fmt.Errorf("unable to get entries: %v", err) + } + defer rows.Close() + + entries := make(model.Entries, 0) + for rows.Next() { + var entry model.Entry + var iconID interface{} + + entry.Feed = &model.Feed{UserID: e.userID} + entry.Feed.Category = &model.Category{UserID: e.userID} + entry.Feed.Icon = &model.FeedIcon{} + + err := rows.Scan( + &entry.ID, + &entry.UserID, + &entry.FeedID, + &entry.Hash, + &entry.Date, + &entry.Title, + &entry.URL, + &entry.Author, + &entry.Content, + &entry.Status, + &entry.Feed.Title, + &entry.Feed.FeedURL, + &entry.Feed.SiteURL, + &entry.Feed.CheckedAt, + &entry.Feed.Category.ID, + &entry.Feed.Category.Title, + &iconID, + ) + + if err != nil { + return nil, fmt.Errorf("Unable to fetch entry row: %v", err) + } + + if iconID == nil { + entry.Feed.Icon.IconID = 0 + } else { + entry.Feed.Icon.IconID = iconID.(int64) + } + + entry.Feed.ID = entry.FeedID + entry.Feed.Icon.FeedID = entry.FeedID + entries = append(entries, &entry) + } + + return entries, nil +} + +func (e *EntryQueryBuilder) buildCondition() ([]interface{}, string) { + args := []interface{}{e.userID} + conditions := []string{"e.user_id = $1"} + + if len(e.conditions) > 0 { + conditions = append(conditions, e.conditions...) + args = append(args, e.args...) 
+ } + + if e.categoryID != 0 { + conditions = append(conditions, fmt.Sprintf("f.category_id=$%d", len(args)+1)) + args = append(args, e.categoryID) + } + + if e.feedID != 0 { + conditions = append(conditions, fmt.Sprintf("e.feed_id=$%d", len(args)+1)) + args = append(args, e.feedID) + } + + if e.entryID != 0 { + conditions = append(conditions, fmt.Sprintf("e.id=$%d", len(args)+1)) + args = append(args, e.entryID) + } + + if e.gtEntryID != 0 { + conditions = append(conditions, fmt.Sprintf("e.id > $%d", len(args)+1)) + args = append(args, e.gtEntryID) + } + + if e.ltEntryID != 0 { + conditions = append(conditions, fmt.Sprintf("e.id < $%d", len(args)+1)) + args = append(args, e.ltEntryID) + } + + if e.status != "" { + conditions = append(conditions, fmt.Sprintf("e.status=$%d", len(args)+1)) + args = append(args, e.status) + } + + return args, strings.Join(conditions, " AND ") +} + +func (e *EntryQueryBuilder) buildSorting() string { + var queries []string + + if e.order != "" { + queries = append(queries, fmt.Sprintf(`ORDER BY "%s"`, e.order)) + } + + if e.direction != "" { + queries = append(queries, fmt.Sprintf(`%s`, e.direction)) + } + + if e.limit != 0 { + queries = append(queries, fmt.Sprintf(`LIMIT %d`, e.limit)) + } + + if e.offset != 0 { + queries = append(queries, fmt.Sprintf(`OFFSET %d`, e.offset)) + } + + return strings.Join(queries, " ") +} + +func NewEntryQueryBuilder(store *Storage, userID int64, timezone string) *EntryQueryBuilder { + return &EntryQueryBuilder{ + store: store, + userID: userID, + timezone: timezone, + } +} diff --git a/storage/feed.go b/storage/feed.go new file mode 100644 index 00000000..ec085807 --- /dev/null +++ b/storage/feed.go @@ -0,0 +1,223 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
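
A sketch of how the fluent `EntryQueryBuilder` above is meant to be chained, here fetching the ten most recent unread entries for a user (not part of the commit; the timezone and limit are placeholders):

```go
package sketch

import (
	"fmt"

	"github.com/miniflux/miniflux2/storage"
)

func printUnreadEntries(store *storage.Storage, userID int64) error {
	// GetEntryQueryBuilder is defined on Storage in storage/entry.go earlier in this diff.
	builder := store.GetEntryQueryBuilder(userID, "UTC")
	builder.WithStatus("unread").WithOrder("published_at").WithDirection("desc").WithLimit(10)

	entries, err := builder.GetEntries()
	if err != nil {
		return err
	}

	for _, entry := range entries {
		fmt.Println(entry.Title, entry.URL)
	}
	return nil
}
```
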
+ +package storage + +import ( + "database/sql" + "errors" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "time" +) + +func (s *Storage) FeedExists(userID, feedID int64) bool { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:FeedExists] userID=%d, feedID=%d", userID, feedID)) + + var result int + query := `SELECT count(*) as c FROM feeds WHERE user_id=$1 AND id=$2` + s.db.QueryRow(query, userID, feedID).Scan(&result) + return result >= 1 +} + +func (s *Storage) FeedURLExists(userID int64, feedURL string) bool { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:FeedURLExists] userID=%d, feedURL=%s", userID, feedURL)) + + var result int + query := `SELECT count(*) as c FROM feeds WHERE user_id=$1 AND feed_url=$2` + s.db.QueryRow(query, userID, feedURL).Scan(&result) + return result >= 1 +} + +func (s *Storage) GetFeeds(userID int64) (model.Feeds, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetFeeds] userID=%d", userID)) + + feeds := make(model.Feeds, 0) + query := `SELECT + f.id, f.feed_url, f.site_url, f.title, f.etag_header, f.last_modified_header, + f.user_id, f.checked_at, f.parsing_error_count, f.parsing_error_msg, + f.category_id, c.title as category_title, + fi.icon_id + FROM feeds f + LEFT JOIN categories c ON c.id=f.category_id + LEFT JOIN feed_icons fi ON fi.feed_id=f.id + WHERE f.user_id=$1 + ORDER BY f.id ASC` + + rows, err := s.db.Query(query, userID) + if err != nil { + return nil, fmt.Errorf("Unable to fetch feeds: %v", err) + } + defer rows.Close() + + for rows.Next() { + var feed model.Feed + var iconID, errorMsg interface{} + feed.Category = &model.Category{UserID: userID} + feed.Icon = &model.FeedIcon{} + + err := rows.Scan( + &feed.ID, + &feed.FeedURL, + &feed.SiteURL, + &feed.Title, + &feed.EtagHeader, + &feed.LastModifiedHeader, + &feed.UserID, + &feed.CheckedAt, + &feed.ParsingErrorCount, + &errorMsg, + &feed.Category.ID, + &feed.Category.Title, + &iconID, + ) + + if err != nil { + return nil, fmt.Errorf("Unable to fetch feeds row: %v", err) + } + + if iconID == nil { + feed.Icon.IconID = 0 + } else { + feed.Icon.IconID = iconID.(int64) + } + + if errorMsg == nil { + feed.ParsingErrorMsg = "" + } else { + feed.ParsingErrorMsg = errorMsg.(string) + } + + feed.Icon.FeedID = feed.ID + feeds = append(feeds, &feed) + } + + return feeds, nil +} + +func (s *Storage) GetFeedById(userID, feedID int64) (*model.Feed, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetFeedById] feedID=%d", feedID)) + + var feed model.Feed + feed.Category = &model.Category{UserID: userID} + + query := ` + SELECT + f.id, f.feed_url, f.site_url, f.title, f.etag_header, f.last_modified_header, + f.user_id, f.checked_at, f.parsing_error_count, f.parsing_error_msg, + f.category_id, c.title as category_title + FROM feeds f + LEFT JOIN categories c ON c.id=f.category_id + WHERE f.user_id=$1 AND f.id=$2` + + err := s.db.QueryRow(query, userID, feedID).Scan( + &feed.ID, + &feed.FeedURL, + &feed.SiteURL, + &feed.Title, + &feed.EtagHeader, + &feed.LastModifiedHeader, + &feed.UserID, + &feed.CheckedAt, + &feed.ParsingErrorCount, + &feed.ParsingErrorMsg, + &feed.Category.ID, + &feed.Category.Title, + ) + + switch { + case err == sql.ErrNoRows: + return nil, nil + case err != nil: + return nil, fmt.Errorf("Unable to fetch feed: %v", err) + } + + return &feed, nil +} + +func (s *Storage) CreateFeed(feed *model.Feed) error { + defer helper.ExecutionTime(time.Now(), 
fmt.Sprintf("[Storage:CreateFeed] feedURL=%s", feed.FeedURL)) + sql := ` + INSERT INTO feeds + (feed_url, site_url, title, category_id, user_id, etag_header, last_modified_header) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id + ` + + err := s.db.QueryRow( + sql, + feed.FeedURL, + feed.SiteURL, + feed.Title, + feed.Category.ID, + feed.UserID, + feed.EtagHeader, + feed.LastModifiedHeader, + ).Scan(&feed.ID) + + if err != nil { + return fmt.Errorf("Unable to create feed: %v", err) + } + + for i := 0; i < len(feed.Entries); i++ { + feed.Entries[i].FeedID = feed.ID + feed.Entries[i].UserID = feed.UserID + err := s.CreateEntry(feed.Entries[i]) + if err != nil { + return err + } + } + + return nil +} + +func (s *Storage) UpdateFeed(feed *model.Feed) (err error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:UpdateFeed] feedURL=%s", feed.FeedURL)) + + query := `UPDATE feeds SET + feed_url=$1, site_url=$2, title=$3, category_id=$4, etag_header=$5, last_modified_header=$6, checked_at=$7, + parsing_error_msg=$8, parsing_error_count=$9 + WHERE id=$10 AND user_id=$11` + + _, err = s.db.Exec(query, + feed.FeedURL, + feed.SiteURL, + feed.Title, + feed.Category.ID, + feed.EtagHeader, + feed.LastModifiedHeader, + feed.CheckedAt, + feed.ParsingErrorMsg, + feed.ParsingErrorCount, + feed.ID, + feed.UserID, + ) + + if err != nil { + return fmt.Errorf("Unable to update feed: %v", err) + } + + return nil +} + +func (s *Storage) RemoveFeed(userID, feedID int64) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:RemoveFeed] userID=%d, feedID=%d", userID, feedID)) + + result, err := s.db.Exec("DELETE FROM feeds WHERE id = $1 AND user_id = $2", feedID, userID) + if err != nil { + return fmt.Errorf("Unable to remove this feed: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("Unable to remove this feed: %v", err) + } + + if count == 0 { + return errors.New("no feed has been removed") + } + + return nil +} diff --git a/storage/icon.go b/storage/icon.go new file mode 100644 index 00000000..993e4a7f --- /dev/null +++ b/storage/icon.go @@ -0,0 +1,106 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package storage + +import ( + "database/sql" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "strings" + "time" +) + +func (s *Storage) HasIcon(feedID int64) bool { + var result int + query := `SELECT count(*) as c FROM feed_icons WHERE feed_id=$1` + s.db.QueryRow(query, feedID).Scan(&result) + return result == 1 +} + +func (s *Storage) GetIconByID(iconID int64) (*model.Icon, error) { + defer helper.ExecutionTime(time.Now(), "[Storage:GetIconByID]") + + var icon model.Icon + query := `SELECT id, hash, mime_type, content FROM icons WHERE id=$1` + err := s.db.QueryRow(query, iconID).Scan(&icon.ID, &icon.Hash, &icon.MimeType, &icon.Content) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("Unable to fetch icon by hash: %v", err) + } + + return &icon, nil +} + +func (s *Storage) GetIconByHash(icon *model.Icon) error { + defer helper.ExecutionTime(time.Now(), "[Storage:GetIconByHash]") + + err := s.db.QueryRow(`SELECT id FROM icons WHERE hash=$1`, icon.Hash).Scan(&icon.ID) + if err == sql.ErrNoRows { + return nil + } else if err != nil { + return fmt.Errorf("Unable to fetch icon by hash: %v", err) + } + + return nil +} + +func (s *Storage) CreateIcon(icon *model.Icon) error { + defer helper.ExecutionTime(time.Now(), "[Storage:CreateIcon]") + + query := ` + INSERT INTO icons + (hash, mime_type, content) + VALUES + ($1, $2, $3) + RETURNING id + ` + err := s.db.QueryRow( + query, + icon.Hash, + normalizeMimeType(icon.MimeType), + icon.Content, + ).Scan(&icon.ID) + + if err != nil { + return fmt.Errorf("Unable to create icon: %v", err) + } + + return nil +} + +func (s *Storage) CreateFeedIcon(feed *model.Feed, icon *model.Icon) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:CreateFeedIcon] feedID=%d", feed.ID)) + + err := s.GetIconByHash(icon) + if err != nil { + return err + } + + if icon.ID == 0 { + err := s.CreateIcon(icon) + if err != nil { + return err + } + } + + _, err = s.db.Exec(`INSERT INTO feed_icons (feed_id, icon_id) VALUES ($1, $2)`, feed.ID, icon.ID) + if err != nil { + return fmt.Errorf("Unable to create feed icon: %v", err) + } + + return nil +} + +func normalizeMimeType(mimeType string) string { + mimeType = strings.ToLower(mimeType) + switch mimeType { + case "image/png", "image/jpeg", "image/jpg", "image/webp", "image/svg+xml", "image/x-icon", "image/gif": + return mimeType + default: + return "image/x-icon" + } +} diff --git a/storage/job.go b/storage/job.go new file mode 100644 index 00000000..5383a5b5 --- /dev/null +++ b/storage/job.go @@ -0,0 +1,44 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
+ +package storage + +import ( + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "log" + "time" +) + +const maxParsingError = 3 + +func (s *Storage) GetJobs(batchSize int) []model.Job { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("storage.GetJobs[%d]", batchSize)) + + var jobs []model.Job + query := `SELECT + id, user_id + FROM feeds + WHERE parsing_error_count < $1 + ORDER BY checked_at ASC LIMIT %d` + + rows, err := s.db.Query(fmt.Sprintf(query, batchSize), maxParsingError) + if err != nil { + log.Println("Unable to fetch feed jobs:", err) + } + defer rows.Close() + + for rows.Next() { + var job model.Job + if err := rows.Scan(&job.FeedID, &job.UserID); err != nil { + log.Println("Unable to fetch feed job:", err) + break + } + + jobs = append(jobs, job) + } + + return jobs +} diff --git a/storage/migration.go b/storage/migration.go new file mode 100644 index 00000000..a41e812c --- /dev/null +++ b/storage/migration.go @@ -0,0 +1,53 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package storage + +import ( + "fmt" + "github.com/miniflux/miniflux2/sql" + "log" + "strconv" +) + +const schemaVersion = 1 + +func (s *Storage) Migrate() { + var currentVersion int + s.db.QueryRow(`select version from schema_version`).Scan(¤tVersion) + + fmt.Println("Current schema version:", currentVersion) + fmt.Println("Latest schema version:", schemaVersion) + + for version := currentVersion + 1; version <= schemaVersion; version++ { + fmt.Println("Migrating to version:", version) + + tx, err := s.db.Begin() + if err != nil { + log.Fatalln(err) + } + + rawSQL := sql.SqlMap["schema_version_"+strconv.Itoa(version)] + // fmt.Println(rawSQL) + _, err = tx.Exec(rawSQL) + if err != nil { + tx.Rollback() + log.Fatalln(err) + } + + if _, err := tx.Exec(`delete from schema_version`); err != nil { + tx.Rollback() + log.Fatalln(err) + } + + if _, err := tx.Exec(`insert into schema_version (version) values($1)`, version); err != nil { + tx.Rollback() + log.Fatalln(err) + } + + if err := tx.Commit(); err != nil { + log.Fatalln(err) + } + } +} diff --git a/storage/session.go b/storage/session.go new file mode 100644 index 00000000..296711de --- /dev/null +++ b/storage/session.go @@ -0,0 +1,125 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
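
The migration code above implies a simple bootstrap order: open the database, run pending migrations, then serve. A minimal sketch, assuming an example connection string (`NewStorage` is defined in storage/storage.go further down in this diff):

```go
package main

import "github.com/miniflux/miniflux2/storage"

func main() {
	// The connection string and pool size are examples only.
	store := storage.NewStorage("postgres://localhost/miniflux2?sslmode=disable", 10)
	defer store.Close()

	// Applies each schema_version_N from sql.SqlMap that is newer than the version
	// recorded in the schema_version table, one transaction per step.
	store.Migrate()
}
```
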
+ +package storage + +import ( + "database/sql" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" +) + +func (s *Storage) GetSessions(userID int64) (model.Sessions, error) { + query := `SELECT id, user_id, token, created_at, user_agent, ip FROM sessions WHERE user_id=$1 ORDER BY id DESC` + rows, err := s.db.Query(query, userID) + if err != nil { + return nil, fmt.Errorf("unable to fetch sessions: %v", err) + } + defer rows.Close() + + var sessions model.Sessions + for rows.Next() { + var session model.Session + err := rows.Scan( + &session.ID, + &session.UserID, + &session.Token, + &session.CreatedAt, + &session.UserAgent, + &session.IP, + ) + + if err != nil { + return nil, fmt.Errorf("unable to fetch session row: %v", err) + } + + sessions = append(sessions, &session) + } + + return sessions, nil +} + +func (s *Storage) CreateSession(username, userAgent, ip string) (sessionID string, err error) { + var userID int64 + + err = s.db.QueryRow("SELECT id FROM users WHERE username = $1", username).Scan(&userID) + if err != nil { + return "", fmt.Errorf("unable to fetch UserID: %v", err) + } + + token := helper.GenerateRandomString(64) + query := "INSERT INTO sessions (token, user_id, user_agent, ip) VALUES ($1, $2, $3, $4)" + _, err = s.db.Exec(query, token, userID, userAgent, ip) + if err != nil { + return "", fmt.Errorf("unable to create session: %v", err) + } + + s.SetLastLogin(userID) + + return token, nil +} + +func (s *Storage) GetSessionByToken(token string) (*model.Session, error) { + var session model.Session + + query := "SELECT id, user_id, token, created_at, user_agent, ip FROM sessions WHERE token = $1" + err := s.db.QueryRow(query, token).Scan( + &session.ID, + &session.UserID, + &session.Token, + &session.CreatedAt, + &session.UserAgent, + &session.IP, + ) + + if err == sql.ErrNoRows { + return nil, fmt.Errorf("session not found: %s", token) + } else if err != nil { + return nil, fmt.Errorf("unable to fetch session: %v", err) + } + + return &session, nil +} + +func (s *Storage) RemoveSessionByToken(userID int64, token string) error { + result, err := s.db.Exec(`DELETE FROM sessions WHERE user_id=$1 AND token=$2`, userID, token) + if err != nil { + return fmt.Errorf("unable to remove this session: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("unable to remove this session: %v", err) + } + + if count != 1 { + return fmt.Errorf("nothing has been removed") + } + + return nil +} + +func (s *Storage) RemoveSessionByID(userID, sessionID int64) error { + result, err := s.db.Exec(`DELETE FROM sessions WHERE user_id=$1 AND id=$2`, userID, sessionID) + if err != nil { + return fmt.Errorf("unable to remove this session: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("unable to remove this session: %v", err) + } + + if count != 1 { + return fmt.Errorf("nothing has been removed") + } + + return nil +} + +func (s *Storage) FlushAllSessions() (err error) { + _, err = s.db.Exec(`delete from sessions`) + return +} diff --git a/storage/storage.go b/storage/storage.go new file mode 100644 index 00000000..ebefe912 --- /dev/null +++ b/storage/storage.go @@ -0,0 +1,32 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
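
A login flow sketch tying the session store above to the password check defined later in storage/user.go (the function and package names are illustrative, not from the commit):

```go
package sketch

import "github.com/miniflux/miniflux2/storage"

// login verifies the credentials and returns a new session token.
// CreateSession (above) also records last_login_at via SetLastLogin.
func login(store *storage.Storage, username, password, userAgent, ip string) (string, error) {
	if err := store.CheckPassword(username, password); err != nil {
		return "", err
	}
	return store.CreateSession(username, userAgent, ip)
}
```
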
+ +package storage + +import ( + "database/sql" + "log" + + _ "github.com/lib/pq" +) + +type Storage struct { + db *sql.DB +} + +func (s *Storage) Close() { + s.db.Close() +} + +func NewStorage(databaseUrl string, maxOpenConns int) *Storage { + db, err := sql.Open("postgres", databaseUrl) + if err != nil { + log.Fatalf("Unable to connect to the database: %v", err) + } + + db.SetMaxOpenConns(maxOpenConns) + db.SetMaxIdleConns(2) + + return &Storage{db: db} +} diff --git a/storage/timezone.go b/storage/timezone.go new file mode 100644 index 00000000..8edfc1cc --- /dev/null +++ b/storage/timezone.go @@ -0,0 +1,34 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. + +package storage + +import ( + "fmt" + "github.com/miniflux/miniflux2/helper" + "time" +) + +func (s *Storage) GetTimezones() (map[string]string, error) { + defer helper.ExecutionTime(time.Now(), "[Storage:GetTimezones]") + + timezones := make(map[string]string) + query := `select name from pg_timezone_names() order by name asc` + rows, err := s.db.Query(query) + if err != nil { + return nil, fmt.Errorf("unable to fetch timezones: %v", err) + } + defer rows.Close() + + for rows.Next() { + var timezone string + if err := rows.Scan(&timezone); err != nil { + return nil, fmt.Errorf("unable to fetch timezones row: %v", err) + } + + timezones[timezone] = timezone + } + + return timezones, nil +} diff --git a/storage/user.go b/storage/user.go new file mode 100644 index 00000000..736f7ac6 --- /dev/null +++ b/storage/user.go @@ -0,0 +1,195 @@ +// Copyright 2017 Frédéric Guillot. All rights reserved. +// Use of this source code is governed by the Apache 2.0 +// license that can be found in the LICENSE file. 
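
For the user store whose header appears above, creating the first administrator might look like this; the credentials and names are placeholders, and this is a sketch rather than code from the commit:

```go
package sketch

import (
	"github.com/miniflux/miniflux2/model"
	"github.com/miniflux/miniflux2/storage"
)

// createAdmin relies on CreateUser (below), which lowercases the username,
// hashes the password with bcrypt and creates a default "All" category.
func createAdmin(store *storage.Storage) error {
	user := &model.User{Username: "admin", Password: "changeme1", IsAdmin: true}
	return store.CreateUser(user)
}
```
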
+ +package storage + +import ( + "database/sql" + "errors" + "fmt" + "github.com/miniflux/miniflux2/helper" + "github.com/miniflux/miniflux2/model" + "strings" + "time" + + "golang.org/x/crypto/bcrypt" +) + +func (s *Storage) SetLastLogin(userID int64) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:SetLastLogin] userID=%d", userID)) + query := "UPDATE users SET last_login_at=now() WHERE id=$1" + _, err := s.db.Exec(query, userID) + if err != nil { + return fmt.Errorf("unable to update last login date: %v", err) + } + + return nil +} + +func (s *Storage) UserExists(username string) bool { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:UserExists] username=%s", username)) + + var result int + s.db.QueryRow(`SELECT count(*) as c FROM users WHERE username=$1`, username).Scan(&result) + return result >= 1 +} + +func (s *Storage) AnotherUserExists(userID int64, username string) bool { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:AnotherUserExists] userID=%d, username=%s", userID, username)) + + var result int + s.db.QueryRow(`SELECT count(*) as c FROM users WHERE id != $1 AND username=$2`, userID, username).Scan(&result) + return result >= 1 +} + +func (s *Storage) CreateUser(user *model.User) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:CreateUser] username=%s", user.Username)) + + password, err := hashPassword(user.Password) + if err != nil { + return err + } + + query := "INSERT INTO users (username, password, is_admin) VALUES ($1, $2, $3) RETURNING id" + err = s.db.QueryRow(query, strings.ToLower(user.Username), password, user.IsAdmin).Scan(&user.ID) + if err != nil { + return fmt.Errorf("unable to create user: %v", err) + } + + s.CreateCategory(&model.Category{Title: "All", UserID: user.ID}) + return nil +} + +func (s *Storage) UpdateUser(user *model.User) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:UpdateUser] username=%s", user.Username)) + user.Username = strings.ToLower(user.Username) + + if user.Password != "" { + hashedPassword, err := hashPassword(user.Password) + if err != nil { + return err + } + + query := "UPDATE users SET username=$1, password=$2, is_admin=$3, theme=$4, language=$5, timezone=$6 WHERE id=$7" + _, err = s.db.Exec(query, user.Username, hashedPassword, user.IsAdmin, user.Theme, user.Language, user.Timezone, user.ID) + if err != nil { + return fmt.Errorf("unable to update user: %v", err) + } + } else { + query := "UPDATE users SET username=$1, is_admin=$2, theme=$3, language=$4, timezone=$5 WHERE id=$6" + _, err := s.db.Exec(query, user.Username, user.IsAdmin, user.Theme, user.Language, user.Timezone, user.ID) + if err != nil { + return fmt.Errorf("unable to update user: %v", err) + } + } + + return nil +} + +func (s *Storage) GetUserById(userID int64) (*model.User, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetUserById] userID=%d", userID)) + + var user model.User + row := s.db.QueryRow("SELECT id, username, is_admin, theme, language, timezone FROM users WHERE id = $1", userID) + err := row.Scan(&user.ID, &user.Username, &user.IsAdmin, &user.Theme, &user.Language, &user.Timezone) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("unable to fetch user: %v", err) + } + + return &user, nil +} + +func (s *Storage) GetUserByUsername(username string) (*model.User, error) { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:GetUserByUsername] username=%s", username)) + + var 
user model.User + row := s.db.QueryRow("SELECT id, username, is_admin, theme, language, timezone FROM users WHERE username=$1", username) + err := row.Scan(&user.ID, &user.Username, &user.IsAdmin, &user.Theme, &user.Language, &user.Timezone) + if err == sql.ErrNoRows { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("unable to fetch user: %v", err) + } + + return &user, nil +} + +func (s *Storage) RemoveUser(userID int64) error { + defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:RemoveUser] userID=%d", userID)) + + result, err := s.db.Exec("DELETE FROM users WHERE id = $1", userID) + if err != nil { + return fmt.Errorf("unable to remove this user: %v", err) + } + + count, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("unable to remove this user: %v", err) + } + + if count == 0 { + return errors.New("nothing has been removed.") + } + + return nil +} + +func (s *Storage) GetUsers() (model.Users, error) { + defer helper.ExecutionTime(time.Now(), "[Storage:GetUsers]") + + var users model.Users + rows, err := s.db.Query("SELECT id, username, is_admin, theme, language, timezone, last_login_at FROM users ORDER BY username ASC") + if err != nil { + return nil, fmt.Errorf("unable to fetch users: %v", err) + } + defer rows.Close() + + for rows.Next() { + var user model.User + err := rows.Scan( + &user.ID, + &user.Username, + &user.IsAdmin, + &user.Theme, + &user.Language, + &user.Timezone, + &user.LastLoginAt, + ) + + if err != nil { + return nil, fmt.Errorf("unable to fetch users row: %v", err) + } + + users = append(users, &user) + } + + return users, nil +} + +func (s *Storage) CheckPassword(username, password string) error { + defer helper.ExecutionTime(time.Now(), "[Storage:CheckPassword]") + + var hash string + username = strings.ToLower(username) + + err := s.db.QueryRow("SELECT password FROM users WHERE username=$1", username).Scan(&hash) + if err == sql.ErrNoRows { + return fmt.Errorf("Unable to find this user: %s\n", username) + } else if err != nil { + return fmt.Errorf("Unable to fetch user: %v\n", err) + } + + if err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)); err != nil { + return fmt.Errorf("Invalid password for %s\n", username) + } + + return nil +} + +func hashPassword(password string) (string, error) { + bytes, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + return string(bytes), err +} diff --git a/vendor/github.com/PuerkitoBio/goquery/.gitattributes b/vendor/github.com/PuerkitoBio/goquery/.gitattributes new file mode 100644 index 00000000..0cc26ec0 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/.gitattributes @@ -0,0 +1 @@ +testdata/* linguist-vendored diff --git a/vendor/github.com/PuerkitoBio/goquery/.gitignore b/vendor/github.com/PuerkitoBio/goquery/.gitignore new file mode 100644 index 00000000..970381cd --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/.gitignore @@ -0,0 +1,16 @@ +# editor temporary files +*.sublime-* +.DS_Store +*.swp +#*.*# +tags + +# direnv config +.env* + +# test binaries +*.test + +# coverage and profilte outputs +*.out + diff --git a/vendor/github.com/PuerkitoBio/goquery/.travis.yml b/vendor/github.com/PuerkitoBio/goquery/.travis.yml new file mode 100644 index 00000000..bc1e7cb0 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/.travis.yml @@ -0,0 +1,11 @@ +language: go + +go: + - 1.1 + - 1.2 + - 1.3 + - 1.4 + - 1.5 + - 1.6 + - 1.7 + - tip diff --git a/vendor/github.com/PuerkitoBio/goquery/LICENSE 
b/vendor/github.com/PuerkitoBio/goquery/LICENSE new file mode 100644 index 00000000..f743d372 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/LICENSE @@ -0,0 +1,12 @@ +Copyright (c) 2012-2016, Martin Angers & Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/PuerkitoBio/goquery/README.md b/vendor/github.com/PuerkitoBio/goquery/README.md new file mode 100644 index 00000000..ce09cb9c --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/README.md @@ -0,0 +1,124 @@ +# goquery - a little like that j-thing, only in Go [![build status](https://secure.travis-ci.org/PuerkitoBio/goquery.png)](http://travis-ci.org/PuerkitoBio/goquery) [![GoDoc](https://godoc.org/github.com/PuerkitoBio/goquery?status.png)](http://godoc.org/github.com/PuerkitoBio/goquery) + +goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off. + +Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this. + +Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...). + +## Installation + +Please note that because of the net/html dependency, goquery requires Go1.1+. 
+ + $ go get github.com/PuerkitoBio/goquery + +(optional) To run unit tests: + + $ cd $GOPATH/src/github.com/PuerkitoBio/goquery + $ go test + +(optional) To run benchmarks (warning: it runs for a few minutes): + + $ cd $GOPATH/src/github.com/PuerkitoBio/goquery + $ go test -bench=".*" + +## Changelog + +**Note that goquery's API is now stable, and will not break.** + +* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv). +* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb). +* **2016-08-28 (v1.0.1)** : Optimize performance for large documents. +* **2016-07-27 (v1.0.0)** : Tag version 1.0.0. +* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object. +* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`). +* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr]. +* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone]. +* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone]. +* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used. +* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s. +* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader. +* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response. +* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility. +* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out). +* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method. 
+* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases). +* **v0.1.0** : Initial release. + +## API + +goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate. + +jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention: + +* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`) +* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`) +* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`) +* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`) +* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`) +* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`) + +Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour). + +The complete [godoc reference documentation can be found here][doc]. + +Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string): + +* `Find("~")` returns an empty selection because the selector string doesn't match anything. +* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything). +* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything. +* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element. + +## Examples + +See some tips and tricks in the [wiki][]. 
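+
+As a quick, hypothetical sketch of the naming convention and of the invalid-selector behaviour described above (the markup below is made up for this illustration, it is not taken from the test suite):
+
+```Go
+package main
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/PuerkitoBio/goquery"
+)
+
+func main() {
+	html := `<ul><li class="a">one</li><li class="b">two</li><li class="a">three</li></ul>`
+	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
+	if err != nil {
+		panic(err)
+	}
+
+	last := doc.Find("li").Last()
+
+	// No-argument variant vs. selector-string variant of the same traversal.
+	fmt.Println(last.Prev().Text())               // "two"
+	fmt.Println(last.PrevFiltered(".b").Text())   // "two" (the previous sibling matches .b)
+	fmt.Println(last.PrevFiltered(".a").Length()) // 0 (the previous sibling does not match .a)
+
+	// An invalid selector string matches nothing instead of panicking.
+	fmt.Println(doc.Find("~").Length()) // 0
+}
+```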
+ +Adapted from example_test.go: + +```Go +package main + +import ( + "fmt" + "log" + + "github.com/PuerkitoBio/goquery" +) + +func ExampleScrape() { + doc, err := goquery.NewDocument("http://metalsucks.net") + if err != nil { + log.Fatal(err) + } + + // Find the review items + doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) { + // For each item found, get the band and title + band := s.Find("a").Text() + title := s.Find("i").Text() + fmt.Printf("Review %d: %s - %s\n", i, band, title) + }) +} + +func main() { + ExampleScrape() +} +``` + +## License + +The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic]. + +[jquery]: http://jquery.com/ +[go]: http://golang.org/ +[cascadia]: https://github.com/andybalholm/cascadia +[bsd]: http://opensource.org/licenses/BSD-3-Clause +[golic]: http://golang.org/LICENSE +[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE +[doc]: http://godoc.org/github.com/PuerkitoBio/goquery +[index]: http://api.jquery.com/index/ +[gonet]: https://github.com/golang/net/ +[html]: http://godoc.org/golang.org/x/net/html +[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks +[thatguystone]: https://github.com/thatguystone +[piotr]: https://github.com/piotrkowalczuk diff --git a/vendor/github.com/PuerkitoBio/goquery/array.go b/vendor/github.com/PuerkitoBio/goquery/array.go new file mode 100644 index 00000000..d7af5eee --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/array.go @@ -0,0 +1,103 @@ +package goquery + +import ( + "golang.org/x/net/html" +) + +// First reduces the set of matched elements to the first in the set. +// It returns a new Selection object, and an empty Selection object if the +// the selection is empty. +func (s *Selection) First() *Selection { + return s.Eq(0) +} + +// Last reduces the set of matched elements to the last in the set. +// It returns a new Selection object, and an empty Selection object if +// the selection is empty. +func (s *Selection) Last() *Selection { + return s.Eq(-1) +} + +// Eq reduces the set of matched elements to the one at the specified index. +// If a negative index is given, it counts backwards starting at the end of the +// set. It returns a new Selection object, and an empty Selection object if the +// index is invalid. +func (s *Selection) Eq(index int) *Selection { + if index < 0 { + index += len(s.Nodes) + } + + if index >= len(s.Nodes) || index < 0 { + return newEmptySelection(s.document) + } + + return s.Slice(index, index+1) +} + +// Slice reduces the set of matched elements to a subset specified by a range +// of indices. +func (s *Selection) Slice(start, end int) *Selection { + if start < 0 { + start += len(s.Nodes) + } + if end < 0 { + end += len(s.Nodes) + } + return pushStack(s, s.Nodes[start:end]) +} + +// Get retrieves the underlying node at the specified index. +// Get without parameter is not implemented, since the node array is available +// on the Selection object. +func (s *Selection) Get(index int) *html.Node { + if index < 0 { + index += len(s.Nodes) // Negative index gets from the end + } + return s.Nodes[index] +} + +// Index returns the position of the first element within the Selection object +// relative to its sibling elements. 
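+// It returns -1 if the Selection is empty.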
+func (s *Selection) Index() int { + if len(s.Nodes) > 0 { + return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length() + } + return -1 +} + +// IndexSelector returns the position of the first element within the +// Selection object relative to the elements matched by the selector, or -1 if +// not found. +func (s *Selection) IndexSelector(selector string) int { + if len(s.Nodes) > 0 { + sel := s.document.Find(selector) + return indexInSlice(sel.Nodes, s.Nodes[0]) + } + return -1 +} + +// IndexMatcher returns the position of the first element within the +// Selection object relative to the elements matched by the matcher, or -1 if +// not found. +func (s *Selection) IndexMatcher(m Matcher) int { + if len(s.Nodes) > 0 { + sel := s.document.FindMatcher(m) + return indexInSlice(sel.Nodes, s.Nodes[0]) + } + return -1 +} + +// IndexOfNode returns the position of the specified node within the Selection +// object, or -1 if not found. +func (s *Selection) IndexOfNode(node *html.Node) int { + return indexInSlice(s.Nodes, node) +} + +// IndexOfSelection returns the position of the first node in the specified +// Selection object within this Selection object, or -1 if not found. +func (s *Selection) IndexOfSelection(sel *Selection) int { + if sel != nil && len(sel.Nodes) > 0 { + return indexInSlice(s.Nodes, sel.Nodes[0]) + } + return -1 +} diff --git a/vendor/github.com/PuerkitoBio/goquery/array_test.go b/vendor/github.com/PuerkitoBio/goquery/array_test.go new file mode 100644 index 00000000..8e50f758 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/array_test.go @@ -0,0 +1,212 @@ +package goquery + +import ( + "testing" +) + +func TestFirst(t *testing.T) { + sel := Doc().Find(".pvk-content").First() + assertLength(t, sel.Nodes, 1) +} + +func TestFirstEmpty(t *testing.T) { + sel := Doc().Find(".pvk-zzcontentzz").First() + assertLength(t, sel.Nodes, 0) +} + +func TestFirstInvalid(t *testing.T) { + sel := Doc().Find("").First() + assertLength(t, sel.Nodes, 0) +} + +func TestFirstRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.First().End() + assertEqual(t, sel, sel2) +} + +func TestLast(t *testing.T) { + sel := Doc().Find(".pvk-content").Last() + assertLength(t, sel.Nodes, 1) + + // Should contain Footer + foot := Doc().Find(".footer") + if !sel.Contains(foot.Nodes[0]) { + t.Error("Last .pvk-content should contain .footer.") + } +} + +func TestLastEmpty(t *testing.T) { + sel := Doc().Find(".pvk-zzcontentzz").Last() + assertLength(t, sel.Nodes, 0) +} + +func TestLastInvalid(t *testing.T) { + sel := Doc().Find("").Last() + assertLength(t, sel.Nodes, 0) +} + +func TestLastRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Last().End() + assertEqual(t, sel, sel2) +} + +func TestEq(t *testing.T) { + sel := Doc().Find(".pvk-content").Eq(1) + assertLength(t, sel.Nodes, 1) +} + +func TestEqNegative(t *testing.T) { + sel := Doc().Find(".pvk-content").Eq(-1) + assertLength(t, sel.Nodes, 1) + + // Should contain Footer + foot := Doc().Find(".footer") + if !sel.Contains(foot.Nodes[0]) { + t.Error("Index -1 of .pvk-content should contain .footer.") + } +} + +func TestEqEmpty(t *testing.T) { + sel := Doc().Find("something_random_that_does_not_exists").Eq(0) + assertLength(t, sel.Nodes, 0) +} + +func TestEqInvalid(t *testing.T) { + sel := Doc().Find("").Eq(0) + assertLength(t, sel.Nodes, 0) +} + +func TestEqInvalidPositive(t *testing.T) { + sel := Doc().Find(".pvk-content").Eq(3) + assertLength(t, sel.Nodes, 0) +} + +func TestEqInvalidNegative(t 
*testing.T) { + sel := Doc().Find(".pvk-content").Eq(-4) + assertLength(t, sel.Nodes, 0) +} + +func TestEqRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Eq(1).End() + assertEqual(t, sel, sel2) +} + +func TestSlice(t *testing.T) { + sel := Doc().Find(".pvk-content").Slice(0, 2) + + assertLength(t, sel.Nodes, 2) +} + +func TestSliceEmpty(t *testing.T) { + defer assertPanic(t) + Doc().Find("x").Slice(0, 2) +} + +func TestSliceInvalid(t *testing.T) { + defer assertPanic(t) + Doc().Find("").Slice(0, 2) +} + +func TestSliceOutOfBounds(t *testing.T) { + defer assertPanic(t) + Doc().Find(".pvk-content").Slice(2, 12) +} + +func TestNegativeSliceStart(t *testing.T) { + sel := Doc().Find(".container-fluid").Slice(-2, 3) + assertLength(t, sel.Nodes, 1) + assertSelectionIs(t, sel.Eq(0), "#cf3") +} + +func TestNegativeSliceEnd(t *testing.T) { + sel := Doc().Find(".container-fluid").Slice(1, -1) + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel.Eq(0), "#cf2") + assertSelectionIs(t, sel.Eq(1), "#cf3") +} + +func TestNegativeSliceBoth(t *testing.T) { + sel := Doc().Find(".container-fluid").Slice(-3, -1) + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel.Eq(0), "#cf2") + assertSelectionIs(t, sel.Eq(1), "#cf3") +} + +func TestNegativeSliceOutOfBounds(t *testing.T) { + defer assertPanic(t) + Doc().Find(".container-fluid").Slice(-12, -7) +} + +func TestSliceRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Slice(0, 2).End() + assertEqual(t, sel, sel2) +} + +func TestGet(t *testing.T) { + sel := Doc().Find(".pvk-content") + node := sel.Get(1) + if sel.Nodes[1] != node { + t.Errorf("Expected node %v to be %v.", node, sel.Nodes[1]) + } +} + +func TestGetNegative(t *testing.T) { + sel := Doc().Find(".pvk-content") + node := sel.Get(-3) + if sel.Nodes[0] != node { + t.Errorf("Expected node %v to be %v.", node, sel.Nodes[0]) + } +} + +func TestGetInvalid(t *testing.T) { + defer assertPanic(t) + sel := Doc().Find(".pvk-content") + sel.Get(129) +} + +func TestIndex(t *testing.T) { + sel := Doc().Find(".pvk-content") + if i := sel.Index(); i != 1 { + t.Errorf("Expected index of 1, got %v.", i) + } +} + +func TestIndexSelector(t *testing.T) { + sel := Doc().Find(".hero-unit") + if i := sel.IndexSelector("div"); i != 4 { + t.Errorf("Expected index of 4, got %v.", i) + } +} + +func TestIndexSelectorInvalid(t *testing.T) { + sel := Doc().Find(".hero-unit") + if i := sel.IndexSelector(""); i != -1 { + t.Errorf("Expected index of -1, got %v.", i) + } +} + +func TestIndexOfNode(t *testing.T) { + sel := Doc().Find("div.pvk-gutter") + if i := sel.IndexOfNode(sel.Nodes[1]); i != 1 { + t.Errorf("Expected index of 1, got %v.", i) + } +} + +func TestIndexOfNilNode(t *testing.T) { + sel := Doc().Find("div.pvk-gutter") + if i := sel.IndexOfNode(nil); i != -1 { + t.Errorf("Expected index of -1, got %v.", i) + } +} + +func TestIndexOfSelection(t *testing.T) { + sel := Doc().Find("div") + sel2 := Doc().Find(".hero-unit") + if i := sel.IndexOfSelection(sel2); i != 4 { + t.Errorf("Expected index of 4, got %v.", i) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.0 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.0 new file mode 100644 index 00000000..eb584783 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.0 @@ -0,0 +1,436 @@ +PASS +BenchmarkFirst 20000000 92.9 ns/op +BenchmarkLast 20000000 91.6 ns/op +BenchmarkEq 20000000 90.6 ns/op +BenchmarkSlice 20000000 86.7 ns/op +BenchmarkGet 1000000000 2.14 ns/op +BenchmarkIndex 500000 
5308 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 50000 54962 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 11.4 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 12.1 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 5000 327144 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... [output truncated] +BenchmarkAdd 50000 52945 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 205 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 203 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2639 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 50000 30182 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 50000 34855 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 66052 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 69721 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 66077 ns/op +--- 
BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 20000 80021 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 66256 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 20000 79568 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 569441 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 230585 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 231470 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.65 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 9558 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 16809 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkAttr 50000000 37.5 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 100000 18583 ns/op +BenchmarkLength 2000000000 0.80 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 666 ns/op +BenchmarkIs 50000 34328 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 50000 32423 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2707 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true 
+BenchmarkIsSelection 50000 66976 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 66740 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 701722 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.9 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 50000 55444 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 10000 127984 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 355944 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 355596 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 500000 5656 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 200000 9007 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 1000000 1237 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 5613 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 47026 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 51438 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 91820 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + 
bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 95156 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 10000 134383 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 235456 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 235936 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 50000 32451 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 50000 30570 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 50000 30729 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 10000 106704 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 10000 115592 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 50000 54449 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 50000 58503 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 20000 77698 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 20000 85034 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 50000 56458 ns/op +--- BENCH: BenchmarkPrev + 
bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 50000 60163 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 50000 47679 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 50000 51563 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 10000 213998 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 +BenchmarkNextUntilSelection 10000 140720 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 20000 90702 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 5000 456039 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 10000 167944 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 20000 82059 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 10000 150883 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 10000 146578 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 10000 148284 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 10000 154303 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 10000 149062 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: 
PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 10000 150584 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +ok github.com/PuerkitoBio/goquery 188.326s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1 new file mode 100644 index 00000000..c0d9d06c --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1 @@ -0,0 +1,438 @@ +PASS +BenchmarkFirst 20000000 96.2 ns/op +BenchmarkLast 20000000 95.8 ns/op +BenchmarkEq 20000000 94.4 ns/op +BenchmarkSlice 20000000 89.9 ns/op +BenchmarkGet 1000000000 2.31 ns/op +BenchmarkIndex 1000000 1911 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 50000 56034 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 11.8 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 12.1 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 5000 336823 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... 
[output truncated] +BenchmarkAdd 50000 54709 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 209 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 202 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2634 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 50000 31049 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 50000 35167 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 68974 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 74760 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 68670 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 20000 81357 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 68388 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 20000 82108 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 582934 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 241602 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 243612 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.14 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 9848 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 17569 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkAttr 50000000 37.6 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 100000 19345 ns/op +BenchmarkLength 2000000000 0.80 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 688 ns/op +BenchmarkIs 50000 35061 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 50000 32789 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2816 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 68272 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 68107 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 709386 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 12.4 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 50000 56342 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 10000 131878 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 374240 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 
+BenchmarkFindNodes 5000 374447 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 200000 9721 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 200000 12909 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 1000000 1869 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 5941 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 46223 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 51452 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 93967 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 97617 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 10000 138898 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 247817 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 246055 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 50000 33201 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 50000 31486 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: 
ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 50000 31754 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 20000 94749 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 10000 103926 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 50000 33782 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 50000 37108 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 64769 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 71050 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 50000 33908 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 50000 37353 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 50000 31056 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 50000 34286 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 10000 202553 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 +BenchmarkNextUntilSelection 20000 98693 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 50000 45532 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: 
NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 5000 454378 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 10000 123594 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 50000 37509 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 10000 109317 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 10000 105959 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 10000 107132 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 10000 114474 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 10000 107592 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 10000 107495 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +ok github.com/PuerkitoBio/goquery 187.652s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1-v0.2.1-go1.1rc1.svg b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1-v0.2.1-go1.1rc1.svg new file mode 100644 index 00000000..849a70b7 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.1.1-v0.2.1-go1.1rc1.svg @@ -0,0 +1,405 @@ + + + + + + + + ++0.10% + +BenchmarkFirst + + +-0.10% + +BenchmarkLast + + ++2.86% + +BenchmarkEq + + ++2.67% + +BenchmarkSlice + + +-10.82% + +BenchmarkGet + + +-44.06% + +BenchmarkIndex + + +-52.26% + +BenchmarkIndexSelector + + +-9.32% + +BenchmarkIndexOfNode + + +-2.48% + +BenchmarkIndexOfSelection + + +-34.96% + +BenchmarkMetalReviewExample + + +-59.16% + +BenchmarkAdd + + ++0.00% + +BenchmarkAddSelection + + ++3.47% + +BenchmarkAddNodes + + +-2.51% + +BenchmarkAndSelf + + +-13.96% + +BenchmarkFilter + + +-14.53% + +BenchmarkNot + + +-3.81% + +BenchmarkFilterFunction + + +-2.83% + +BenchmarkNotFunction + + +-4.63% + +BenchmarkFilterNodes + + +-2.32% + +BenchmarkNotNodes + + +-4.23% + +BenchmarkFilterSelection + + +-3.31% + +BenchmarkNotSelection + + 
+-33.69% + +BenchmarkHas + + +-4.53% + +BenchmarkHasNodes + + +-5.30% + +BenchmarkHasSelection + + ++11.35% + +BenchmarkEnd + + +-3.33% + +BenchmarkEach + + +-9.16% + +BenchmarkMap + + +-19.15% + +BenchmarkAttr + + +-3.24% + +BenchmarkText + + +-61.25% + +BenchmarkLength + + +-11.92% + +BenchmarkHtml + + +-16.46% + +BenchmarkIs + + +-27.31% + +BenchmarkIsPositional + + +-13.49% + +BenchmarkIsFunction + + +-1.71% + +BenchmarkIsSelection + + +-3.03% + +BenchmarkIsNodes + + +-20.30% + +BenchmarkHasClass + + +-11.29% + +BenchmarkContains + + +-50.88% + +BenchmarkFind + + +-45.28% + +BenchmarkFindWithinSelection + + ++22.95% + +BenchmarkFindSelection + + ++22.68% + +BenchmarkFindNodes + + +-65.08% + +BenchmarkContents + + +-59.77% + +BenchmarkContentsFiltered + + +-64.31% + +BenchmarkChildren + + +-39.56% + +BenchmarkChildrenFiltered + + +-6.61% + +BenchmarkParent + + +-10.92% + +BenchmarkParentFiltered + + +-1.72% + +BenchmarkParents + + +-2.37% + +BenchmarkParentsFiltered + + +-47.60% + +BenchmarkParentsUntil + + +-36.39% + +BenchmarkParentsUntilSelection + + +-35.99% + +BenchmarkParentsUntilNodes + + +-48.84% + +BenchmarkParentsFilteredUntil + + +-32.75% + +BenchmarkParentsFilteredUntilSelection + + +-33.12% + +BenchmarkParentsFilteredUntilNodes + + +-29.78% + +BenchmarkSiblings + + +-30.89% + +BenchmarkSiblingsFiltered + + +-68.19% + +BenchmarkNext + + +-66.59% + +BenchmarkNextFiltered + + +-31.08% + +BenchmarkNextAll + + +-30.03% + +BenchmarkNextAllFiltered + + +-68.32% + +BenchmarkPrev + + +-66.65% + +BenchmarkPrevFiltered + + +-43.33% + +BenchmarkPrevAll + + +-43.24% + +BenchmarkPrevAllFiltered + + +-54.27% + +BenchmarkNextUntil + + +-37.90% + +BenchmarkNextUntilSelection + + +-43.21% + +BenchmarkNextUntilNodes + + +-49.33% + +BenchmarkPrevUntil + + +-37.02% + +BenchmarkPrevUntilSelection + + +-44.59% + +BenchmarkPrevUntilNodes + + +-57.79% + +BenchmarkNextFilteredUntil + + +-36.61% + +BenchmarkNextFilteredUntilSelection + + +-37.81% + +BenchmarkNextFilteredUntilNodes + + +-59.21% + +BenchmarkPrevFilteredUntil + + +-36.20% + +BenchmarkPrevFilteredUntilSelection + + +-36.05% + +BenchmarkPrevFilteredUntilNodes + + diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0 new file mode 100644 index 00000000..3ea8dcb1 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0 @@ -0,0 +1,459 @@ +PASS +BenchmarkFirst 20000000 94.3 ns/op +BenchmarkLast 20000000 94.7 ns/op +BenchmarkEq 20000000 93.7 ns/op +BenchmarkSlice 20000000 89.9 ns/op +BenchmarkGet 1000000000 2.72 ns/op +BenchmarkIndex 1000000 1834 ns/op +--- BENCH: BenchmarkIndex +bench_array_test.go:73: Index=3 +bench_array_test.go:73: Index=3 +bench_array_test.go:73: Index=3 +bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 50000 53958 ns/op +--- BENCH: BenchmarkIndexSelector +bench_array_test.go:85: IndexSelector=4 +bench_array_test.go:85: IndexSelector=4 +bench_array_test.go:85: IndexSelector=4 +bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 10.1 ns/op +--- BENCH: BenchmarkIndexOfNode +bench_array_test.go:99: IndexOfNode=2 +bench_array_test.go:99: IndexOfNode=2 +bench_array_test.go:99: IndexOfNode=2 +bench_array_test.go:99: IndexOfNode=2 +bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 10.9 ns/op +--- BENCH: BenchmarkIndexOfSelection +bench_array_test.go:111: IndexOfSelection=2 +bench_array_test.go:111: IndexOfSelection=2 +bench_array_test.go:111: IndexOfSelection=2 +bench_array_test.go:111: 
IndexOfSelection=2 +bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 5000 326712 ns/op +--- BENCH: BenchmarkMetalReviewExample +bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 +bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... [output truncated] +BenchmarkAdd 50000 51776 ns/op +--- BENCH: BenchmarkAdd +bench_expand_test.go:20: Add=43 +bench_expand_test.go:20: Add=43 +bench_expand_test.go:20: Add=43 +bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 196 ns/op +--- BENCH: BenchmarkAddSelection +bench_expand_test.go:37: AddSelection=43 +bench_expand_test.go:37: AddSelection=43 +bench_expand_test.go:37: AddSelection=43 +bench_expand_test.go:37: AddSelection=43 +bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 191 ns/op +--- BENCH: BenchmarkAddNodes +bench_expand_test.go:55: AddNodes=43 +bench_expand_test.go:55: AddNodes=43 +bench_expand_test.go:55: AddNodes=43 +bench_expand_test.go:55: AddNodes=43 +bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2495 ns/op +--- BENCH: BenchmarkAndSelf +bench_expand_test.go:71: AndSelf=44 +bench_expand_test.go:71: AndSelf=44 +bench_expand_test.go:71: AndSelf=44 +bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 50000 30974 ns/op +--- BENCH: BenchmarkFilter +bench_filter_test.go:20: Filter=13 +bench_filter_test.go:20: Filter=13 +bench_filter_test.go:20: Filter=13 +bench_filter_test.go:20: Filter=13 +BenchmarkNot 50000 35322 ns/op +--- BENCH: BenchmarkNot +bench_filter_test.go:36: Not=371 +bench_filter_test.go:36: Not=371 +bench_filter_test.go:36: Not=371 +bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 65644 ns/op +--- BENCH: BenchmarkFilterFunction +bench_filter_test.go:55: FilterFunction=112 +bench_filter_test.go:55: FilterFunction=112 +bench_filter_test.go:55: FilterFunction=112 +bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 69245 ns/op +--- BENCH: BenchmarkNotFunction +bench_filter_test.go:74: NotFunction=261 +bench_filter_test.go:74: NotFunction=261 +bench_filter_test.go:74: NotFunction=261 +bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 64824 ns/op +--- BENCH: BenchmarkFilterNodes +bench_filter_test.go:92: FilterNodes=2 +bench_filter_test.go:92: FilterNodes=2 +bench_filter_test.go:92: FilterNodes=2 +bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 20000 76247 ns/op +--- BENCH: BenchmarkNotNodes +bench_filter_test.go:110: NotNodes=360 +bench_filter_test.go:110: NotNodes=360 +bench_filter_test.go:110: NotNodes=360 +bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 66154 ns/op +--- BENCH: BenchmarkFilterSelection +bench_filter_test.go:127: FilterSelection=2 +bench_filter_test.go:127: FilterSelection=2 +bench_filter_test.go:127: FilterSelection=2 +bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 20000 76336 ns/op +--- BENCH: BenchmarkNotSelection +bench_filter_test.go:144: NotSelection=360 +bench_filter_test.go:144: NotSelection=360 +bench_filter_test.go:144: NotSelection=360 +bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 569495 ns/op +--- BENCH: BenchmarkHas 
+bench_filter_test.go:160: Has=13 +bench_filter_test.go:160: Has=13 +bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 227059 ns/op +--- BENCH: BenchmarkHasNodes +bench_filter_test.go:178: HasNodes=15 +bench_filter_test.go:178: HasNodes=15 +bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 227167 ns/op +--- BENCH: BenchmarkHasSelection +bench_filter_test.go:195: HasSelection=15 +bench_filter_test.go:195: HasSelection=15 +bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 3.99 ns/op +--- BENCH: BenchmarkEnd +bench_filter_test.go:211: End=373 +bench_filter_test.go:211: End=373 +bench_filter_test.go:211: End=373 +bench_filter_test.go:211: End=373 +bench_filter_test.go:211: End=373 +bench_filter_test.go:211: End=373 +BenchmarkEach 200000 9354 ns/op +--- BENCH: BenchmarkEach +bench_iteration_test.go:22: Each=59 +bench_iteration_test.go:22: Each=59 +bench_iteration_test.go:22: Each=59 +bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 16557 ns/op +--- BENCH: BenchmarkMap +bench_iteration_test.go:41: Map=59 +bench_iteration_test.go:41: Map=59 +bench_iteration_test.go:41: Map=59 +bench_iteration_test.go:41: Map=59 +BenchmarkAttr 50000000 36.4 ns/op +--- BENCH: BenchmarkAttr +bench_property_test.go:16: Attr=firstHeading +bench_property_test.go:16: Attr=firstHeading +bench_property_test.go:16: Attr=firstHeading +bench_property_test.go:16: Attr=firstHeading +bench_property_test.go:16: Attr=firstHeading +BenchmarkText 100000 18473 ns/op +BenchmarkLength 2000000000 0.76 ns/op +--- BENCH: BenchmarkLength +bench_property_test.go:37: Length=14 +bench_property_test.go:37: Length=14 +bench_property_test.go:37: Length=14 +bench_property_test.go:37: Length=14 +bench_property_test.go:37: Length=14 +bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 666 ns/op +BenchmarkIs 50000 35174 ns/op +--- BENCH: BenchmarkIs +bench_query_test.go:16: Is=true +bench_query_test.go:16: Is=true +bench_query_test.go:16: Is=true +bench_query_test.go:16: Is=true +BenchmarkIsPositional 50000 31814 ns/op +--- BENCH: BenchmarkIsPositional +bench_query_test.go:28: IsPositional=true +bench_query_test.go:28: IsPositional=true +bench_query_test.go:28: IsPositional=true +bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2754 ns/op +--- BENCH: BenchmarkIsFunction +bench_query_test.go:43: IsFunction=true +bench_query_test.go:43: IsFunction=true +bench_query_test.go:43: IsFunction=true +bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 66260 ns/op +--- BENCH: BenchmarkIsSelection +bench_query_test.go:56: IsSelection=true +bench_query_test.go:56: IsSelection=true +bench_query_test.go:56: IsSelection=true +bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 64682 ns/op +--- BENCH: BenchmarkIsNodes +bench_query_test.go:70: IsNodes=true +bench_query_test.go:70: IsNodes=true +bench_query_test.go:70: IsNodes=true +bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 672953 ns/op +--- BENCH: BenchmarkHasClass +bench_query_test.go:82: HasClass=true +bench_query_test.go:82: HasClass=true +bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.3 ns/op +--- BENCH: BenchmarkContains +bench_query_test.go:96: Contains=true +bench_query_test.go:96: Contains=true +bench_query_test.go:96: Contains=true +bench_query_test.go:96: Contains=true +bench_query_test.go:96: Contains=true +BenchmarkFind 50000 53780 ns/op +--- BENCH: BenchmarkFind +bench_traversal_test.go:18: Find=41 +bench_traversal_test.go:18: Find=41 
+bench_traversal_test.go:18: Find=41 +bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 10000 125963 ns/op +--- BENCH: BenchmarkFindWithinSelection +bench_traversal_test.go:34: FindWithinSelection=39 +bench_traversal_test.go:34: FindWithinSelection=39 +bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 357318 ns/op +--- BENCH: BenchmarkFindSelection +bench_traversal_test.go:51: FindSelection=73 +bench_traversal_test.go:51: FindSelection=73 +bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 357587 ns/op +--- BENCH: BenchmarkFindNodes +bench_traversal_test.go:69: FindNodes=73 +bench_traversal_test.go:69: FindNodes=73 +bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 200000 9135 ns/op +--- BENCH: BenchmarkContents +bench_traversal_test.go:85: Contents=16 +bench_traversal_test.go:85: Contents=16 +bench_traversal_test.go:85: Contents=16 +bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 200000 12383 ns/op +--- BENCH: BenchmarkContentsFiltered +bench_traversal_test.go:101: ContentsFiltered=1 +bench_traversal_test.go:101: ContentsFiltered=1 +bench_traversal_test.go:101: ContentsFiltered=1 +bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 1000000 1809 ns/op +--- BENCH: BenchmarkChildren +bench_traversal_test.go:117: Children=2 +bench_traversal_test.go:117: Children=2 +bench_traversal_test.go:117: Children=2 +bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 5814 ns/op +--- BENCH: BenchmarkChildrenFiltered +bench_traversal_test.go:133: ChildrenFiltered=2 +bench_traversal_test.go:133: ChildrenFiltered=2 +bench_traversal_test.go:133: ChildrenFiltered=2 +bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 44810 ns/op +--- BENCH: BenchmarkParent +bench_traversal_test.go:149: Parent=55 +bench_traversal_test.go:149: Parent=55 +bench_traversal_test.go:149: Parent=55 +bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 48795 ns/op +--- BENCH: BenchmarkParentFiltered +bench_traversal_test.go:165: ParentFiltered=4 +bench_traversal_test.go:165: ParentFiltered=4 +bench_traversal_test.go:165: ParentFiltered=4 +bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 89102 ns/op +--- BENCH: BenchmarkParents +bench_traversal_test.go:181: Parents=73 +bench_traversal_test.go:181: Parents=73 +bench_traversal_test.go:181: Parents=73 +bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 93953 ns/op +--- BENCH: BenchmarkParentsFiltered +bench_traversal_test.go:197: ParentsFiltered=18 +bench_traversal_test.go:197: ParentsFiltered=18 +bench_traversal_test.go:197: ParentsFiltered=18 +bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 10000 130783 ns/op +--- BENCH: BenchmarkParentsUntil +bench_traversal_test.go:213: ParentsUntil=52 +bench_traversal_test.go:213: ParentsUntil=52 +bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 231797 ns/op +--- BENCH: BenchmarkParentsUntilSelection +bench_traversal_test.go:230: ParentsUntilSelection=70 +bench_traversal_test.go:230: ParentsUntilSelection=70 +bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 233761 ns/op +--- BENCH: BenchmarkParentsUntilNodes +bench_traversal_test.go:248: ParentsUntilNodes=70 +bench_traversal_test.go:248: ParentsUntilNodes=70 +bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 50000 31360 ns/op +--- BENCH: BenchmarkParentsFilteredUntil 
+bench_traversal_test.go:264: ParentsFilteredUntil=2 +bench_traversal_test.go:264: ParentsFilteredUntil=2 +bench_traversal_test.go:264: ParentsFilteredUntil=2 +bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 50000 30272 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection +bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 50000 30327 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes +bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 20000 89862 ns/op +--- BENCH: BenchmarkSiblings +bench_traversal_test.go:315: Siblings=293 +bench_traversal_test.go:315: Siblings=293 +bench_traversal_test.go:315: Siblings=293 +bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 20000 97948 ns/op +--- BENCH: BenchmarkSiblingsFiltered +bench_traversal_test.go:331: SiblingsFiltered=46 +bench_traversal_test.go:331: SiblingsFiltered=46 +bench_traversal_test.go:331: SiblingsFiltered=46 +bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 50000 31975 ns/op +--- BENCH: BenchmarkNext +bench_traversal_test.go:347: Next=49 +bench_traversal_test.go:347: Next=49 +bench_traversal_test.go:347: Next=49 +bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 50000 34887 ns/op +--- BENCH: BenchmarkNextFiltered +bench_traversal_test.go:363: NextFiltered=6 +bench_traversal_test.go:363: NextFiltered=6 +bench_traversal_test.go:363: NextFiltered=6 +bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 60734 ns/op +--- BENCH: BenchmarkNextAll +bench_traversal_test.go:379: NextAll=234 +bench_traversal_test.go:379: NextAll=234 +bench_traversal_test.go:379: NextAll=234 +bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 67428 ns/op +--- BENCH: BenchmarkNextAllFiltered +bench_traversal_test.go:395: NextAllFiltered=33 +bench_traversal_test.go:395: NextAllFiltered=33 +bench_traversal_test.go:395: NextAllFiltered=33 +bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 50000 32399 ns/op +--- BENCH: BenchmarkPrev +bench_traversal_test.go:411: Prev=49 +bench_traversal_test.go:411: Prev=49 +bench_traversal_test.go:411: Prev=49 +bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 50000 34944 ns/op +--- BENCH: BenchmarkPrevFiltered +bench_traversal_test.go:429: PrevFiltered=7 +bench_traversal_test.go:429: PrevFiltered=7 +bench_traversal_test.go:429: PrevFiltered=7 +bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 29360 ns/op +--- BENCH: BenchmarkPrevAll +bench_traversal_test.go:445: PrevAll=78 +bench_traversal_test.go:445: PrevAll=78 +bench_traversal_test.go:445: PrevAll=78 +bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 50000 32291 ns/op +--- BENCH: BenchmarkPrevAllFiltered +bench_traversal_test.go:461: PrevAllFiltered=6 +bench_traversal_test.go:461: PrevAllFiltered=6 +bench_traversal_test.go:461: PrevAllFiltered=6 +bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 10000 191890 ns/op +--- BENCH: BenchmarkNextUntil +bench_traversal_test.go:477: NextUntil=84 +bench_traversal_test.go:477: NextUntil=84 +bench_traversal_test.go:477: 
NextUntil=84 +BenchmarkNextUntilSelection 20000 92054 ns/op +--- BENCH: BenchmarkNextUntilSelection +bench_traversal_test.go:494: NextUntilSelection=42 +bench_traversal_test.go:494: NextUntilSelection=42 +bench_traversal_test.go:494: NextUntilSelection=42 +bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 50000 43401 ns/op +--- BENCH: BenchmarkNextUntilNodes +bench_traversal_test.go:512: NextUntilNodes=12 +bench_traversal_test.go:512: NextUntilNodes=12 +bench_traversal_test.go:512: NextUntilNodes=12 +bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 5000 433383 ns/op +--- BENCH: BenchmarkPrevUntil +bench_traversal_test.go:528: PrevUntil=238 +bench_traversal_test.go:528: PrevUntil=238 +bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 10000 116423 ns/op +--- BENCH: BenchmarkPrevUntilSelection +bench_traversal_test.go:545: PrevUntilSelection=49 +bench_traversal_test.go:545: PrevUntilSelection=49 +bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 50000 35338 ns/op +--- BENCH: BenchmarkPrevUntilNodes +bench_traversal_test.go:563: PrevUntilNodes=11 +bench_traversal_test.go:563: PrevUntilNodes=11 +bench_traversal_test.go:563: PrevUntilNodes=11 +bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 10000 104686 ns/op +--- BENCH: BenchmarkNextFilteredUntil +bench_traversal_test.go:579: NextFilteredUntil=22 +bench_traversal_test.go:579: NextFilteredUntil=22 +bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 20000 99485 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection +bench_traversal_test.go:596: NextFilteredUntilSelection=22 +bench_traversal_test.go:596: NextFilteredUntilSelection=22 +bench_traversal_test.go:596: NextFilteredUntilSelection=22 +bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 20000 99452 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes +bench_traversal_test.go:614: NextFilteredUntilNodes=22 +bench_traversal_test.go:614: NextFilteredUntilNodes=22 +bench_traversal_test.go:614: NextFilteredUntilNodes=22 +bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 10000 112640 ns/op +--- BENCH: BenchmarkPrevFilteredUntil +bench_traversal_test.go:630: PrevFilteredUntil=20 +bench_traversal_test.go:630: PrevFilteredUntil=20 +bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 10000 103702 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection +bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 10000 103277 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes +bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 6530 ns/op +--- BENCH: BenchmarkClosest +bench_traversal_test.go:681: Closest=2 +bench_traversal_test.go:681: Closest=2 +bench_traversal_test.go:681: Closest=2 +bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 1000000 1135 ns/op +--- BENCH: BenchmarkClosestSelection +bench_traversal_test.go:698: ClosestSelection=2 +bench_traversal_test.go:698: ClosestSelection=2 +bench_traversal_test.go:698: ClosestSelection=2 +bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 1000000 1133 ns/op +--- BENCH: 
BenchmarkClosestNodes +bench_traversal_test.go:715: ClosestNodes=2 +bench_traversal_test.go:715: ClosestNodes=2 +bench_traversal_test.go:715: ClosestNodes=2 +bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 192.541s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0-v0.2.1-go1.1rc1.svg b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0-v0.2.1-go1.1rc1.svg new file mode 100644 index 00000000..eaabc618 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.0-v0.2.1-go1.1rc1.svg @@ -0,0 +1,420 @@
[420-line SVG benchmark chart not reproduced here: its markup was lost and only the text labels survived; going by the filename it plots per-benchmark timing deltas between goquery v0.2.0 and v0.2.1 under Go 1.1rc1, and the raw numbers are in the adjacent plain-text bench files.]
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.1-go1.1rc1 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.1-go1.1rc1 new file mode 100644 index 00000000..1f2a426d --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.2.1-go1.1rc1 @@ -0,0 +1,470 @@ +PASS +BenchmarkFirst 20000000 96.3 ns/op +BenchmarkLast 20000000 95.7 ns/op +BenchmarkEq 20000000 97.1 ns/op +BenchmarkSlice 20000000 92.3 ns/op +BenchmarkGet 1000000000 2.06 ns/op +BenchmarkIndex 1000000 1069 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 100000 26750 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 10.7 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 11.8 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 10000 219078 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ...
[output truncated] +BenchmarkAdd 100000 22345 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 209 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 209 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2568 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 100000 26715 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 50000 30058 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 66346 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 72646 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 65493 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 20000 79466 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 65494 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 20000 79387 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 386571 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 230664 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 230705 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.61 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 9520 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 15960 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkAttr 50000000 30.4 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 100000 18718 ns/op +BenchmarkLength 2000000000 0.31 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 606 ns/op +BenchmarkIs 100000 29289 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 100000 23834 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2436 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 67106 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 66042 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 565347 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.0 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 100000 27677 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 50000 72162 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 460124 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + 
bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 459390 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 500000 3395 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 500000 5193 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 5000000 667 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 3591 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 43168 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 45836 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 92348 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 95306 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 50000 72782 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 157639 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 157510 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 100000 16987 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 100000 21174 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: 
ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 100000 21238 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 50000 66536 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 50000 71822 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 200000 10745 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 200000 12399 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 44640 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 49713 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 200000 10743 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 200000 12456 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 17600 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 100000 19460 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 20000 92630 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 +BenchmarkNextUntilSelection 50000 61285 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: 
NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 100000 25859 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 10000 230236 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 20000 77837 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 100000 20784 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 50000 46147 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 50000 67164 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 50000 66628 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 50000 46697 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 50000 68646 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 50000 68745 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 4909 ns/op +--- BENCH: BenchmarkClosest + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 5000000 744 ns/op +--- BENCH: BenchmarkClosestSelection + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: 
ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 5000000 733 ns/op +--- BENCH: BenchmarkClosestNodes + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 220.793s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.0 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.0 new file mode 100644 index 00000000..037c9a9d --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.0 @@ -0,0 +1,476 @@ +PASS +BenchmarkFirst 20000000 95.5 ns/op +BenchmarkLast 20000000 94.9 ns/op +BenchmarkEq 20000000 95.7 ns/op +BenchmarkSlice 20000000 91.7 ns/op +BenchmarkGet 1000000000 2.05 ns/op +BenchmarkIndex 1000000 1079 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 100000 26972 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 10.8 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 11.7 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 10000 213800 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... 
[output truncated] +BenchmarkAdd 100000 21811 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 205 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 202 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2467 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 100000 25643 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 100000 29566 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 66894 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 72183 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 65516 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 20000 78880 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 65232 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 20000 78813 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 388834 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 228552 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 228365 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.62 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 9548 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 15900 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkEachWithBreak 1000000 1650 ns/op +--- BENCH: BenchmarkEachWithBreak + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 +BenchmarkAttr 50000000 30.5 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 100000 18873 ns/op +BenchmarkLength 2000000000 0.31 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 616 ns/op +BenchmarkIs 100000 29499 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 100000 23733 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2404 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 65376 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 65322 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 558933 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.1 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 100000 27841 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 50000 72096 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: 
FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 457349 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 459324 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 500000 3435 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 500000 5241 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 5000000 667 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 3639 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 44867 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 46476 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 92559 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 96142 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 50000 73931 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 159820 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 158811 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 100000 17203 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + 
bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 100000 21358 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 100000 21338 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 50000 66463 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 50000 72503 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 200000 10881 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 200000 12588 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 45075 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 50455 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 200000 10933 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 200000 12579 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 17751 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 100000 19702 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 20000 93586 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: 
NextUntil=84 +BenchmarkNextUntilSelection 50000 61155 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 100000 25805 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 10000 232225 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 20000 78316 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 100000 20657 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 50000 46567 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 50000 67227 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 50000 66995 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 50000 47361 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 50000 68802 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 50000 68928 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 4922 ns/op +--- BENCH: BenchmarkClosest + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 
5000000 738 ns/op +--- BENCH: BenchmarkClosestSelection + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 5000000 737 ns/op +--- BENCH: BenchmarkClosestNodes + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 224.003s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2 new file mode 100644 index 00000000..0a8704c8 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2 @@ -0,0 +1,478 @@ +PASS +BenchmarkFirst 20000000 88.4 ns/op +BenchmarkLast 20000000 88.2 ns/op +BenchmarkEq 20000000 87.4 ns/op +BenchmarkSlice 20000000 84.9 ns/op +BenchmarkGet 2000000000 1.99 ns/op +BenchmarkIndex 2000000 906 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 100000 22276 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 200000000 9.72 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 10.4 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 10000 199277 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... 
[output truncated] +BenchmarkAdd 100000 18277 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 200 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 189 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2569 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 100000 25195 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 100000 29003 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 60690 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 66008 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 59723 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 50000 72698 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 59598 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 50000 72526 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 367076 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 219710 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 219105 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.58 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 8615 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 200000 14271 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkEachWithBreak 1000000 1497 ns/op +--- BENCH: BenchmarkEachWithBreak + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 +BenchmarkAttr 50000000 30.9 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 200000 13729 ns/op +BenchmarkLength 2000000000 0.31 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 537 ns/op +BenchmarkIs 100000 28904 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 100000 23556 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2195 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 60100 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 59962 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 388679 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.0 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 100000 22779 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 50000 62033 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: 
FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 446918 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 441753 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 1000000 2807 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 500000 4477 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 5000000 548 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 3304 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 38248 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 40677 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 83043 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 85391 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 50000 65118 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 144028 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 146713 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 100000 15113 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + 
bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 100000 18881 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 100000 18926 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 50000 63221 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 50000 69028 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 200000 9133 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 200000 10601 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 43089 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 47867 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 200000 9104 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 200000 10579 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 15185 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 100000 17108 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 20000 81087 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: 
NextUntil=84 +BenchmarkNextUntilSelection 50000 55831 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 100000 23130 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 10000 204673 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 50000 70965 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 100000 18591 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 50000 42004 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 50000 61953 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 50000 62124 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 50000 42861 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 50000 62451 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 50000 62631 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 4684 ns/op +--- BENCH: BenchmarkClosest + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 
5000000 622 ns/op +--- BENCH: BenchmarkClosestSelection + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 5000000 617 ns/op +--- BENCH: BenchmarkClosestNodes + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 218.724s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2-take2 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2-take2 new file mode 100644 index 00000000..02a81187 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2-take2 @@ -0,0 +1,477 @@ +PASS +BenchmarkFirst 20000000 88.3 ns/op +BenchmarkLast 20000000 88.9 ns/op +BenchmarkEq 20000000 86.7 ns/op +BenchmarkSlice 20000000 84.1 ns/op +BenchmarkGet 2000000000 1.99 ns/op +BenchmarkIndex 2000000 907 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 200000 13052 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 10.5 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 11.6 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 10000 189556 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... 
[output truncated] +BenchmarkAdd 200000 13714 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 200 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 186 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2532 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 100000 25199 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 100000 29162 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 60733 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 66124 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 59489 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 50000 73623 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 60053 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 50000 73477 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 364859 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 226980 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 220471 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.64 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 8811 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 100000 15365 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkEachWithBreak 1000000 1559 ns/op +--- BENCH: BenchmarkEachWithBreak + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 +BenchmarkAttr 50000000 31.7 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 200000 13901 ns/op +BenchmarkLength 2000000000 0.31 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 541 ns/op +BenchmarkIs 100000 29435 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 100000 22938 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2185 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 60607 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 61599 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 395436 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.0 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 200000 13788 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 50000 54253 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: 
FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 438879 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 437225 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 1000000 2844 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 500000 4528 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 5000000 552 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 3345 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 39482 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 42113 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 84136 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 86041 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 50000 65844 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 146903 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 146638 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 100000 16413 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + 
bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 100000 20366 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 100000 18800 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 50000 63443 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 50000 69250 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 200000 9193 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 200000 10767 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 42829 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 48174 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 200000 9114 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 200000 11114 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 16387 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 100000 18322 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 20000 83828 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: 
NextUntil=84 +BenchmarkNextUntilSelection 50000 58822 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 100000 23173 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 10000 219407 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 20000 76033 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 100000 19417 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 50000 44648 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 50000 62751 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 50000 62035 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 50000 43331 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 50000 64767 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 50000 67808 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 4870 ns/op +--- BENCH: BenchmarkClosest + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 
5000000 656 ns/op +--- BENCH: BenchmarkClosestSelection + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 5000000 663 ns/op +--- BENCH: BenchmarkClosestNodes + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 218.007s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2rc1 b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2rc1 new file mode 100644 index 00000000..e1bada78 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v0.3.2-go1.2rc1 @@ -0,0 +1,477 @@ +PASS +BenchmarkFirst 20000000 91.0 ns/op +BenchmarkLast 20000000 90.5 ns/op +BenchmarkEq 20000000 90.2 ns/op +BenchmarkSlice 20000000 88.0 ns/op +BenchmarkGet 1000000000 2.04 ns/op +BenchmarkIndex 2000000 935 ns/op +--- BENCH: BenchmarkIndex + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 + bench_array_test.go:73: Index=3 +BenchmarkIndexSelector 100000 23613 ns/op +--- BENCH: BenchmarkIndexSelector + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 + bench_array_test.go:85: IndexSelector=4 +BenchmarkIndexOfNode 100000000 10.2 ns/op +--- BENCH: BenchmarkIndexOfNode + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 + bench_array_test.go:99: IndexOfNode=2 +BenchmarkIndexOfSelection 100000000 11.0 ns/op +--- BENCH: BenchmarkIndexOfSelection + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 + bench_array_test.go:111: IndexOfSelection=2 +BenchmarkMetalReviewExample 10000 213843 ns/op +--- BENCH: BenchmarkMetalReviewExample + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + + bench_example_test.go:41: MetalReviewExample=10 + bench_example_test.go:40: Review 0: Midnight - Complete and Total Hell (8.5). + Review 1: Over Your Threshold - Facticity (6.0). + Review 2: Nuclear Death Terror - Chaos Reigns (7.5). + Review 3: Evoken - Atra Mors (9.5). + ... 
[output truncated] +BenchmarkAdd 100000 18671 ns/op +--- BENCH: BenchmarkAdd + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 + bench_expand_test.go:20: Add=43 +BenchmarkAddSelection 10000000 204 ns/op +--- BENCH: BenchmarkAddSelection + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 + bench_expand_test.go:37: AddSelection=43 +BenchmarkAddNodes 10000000 195 ns/op +--- BENCH: BenchmarkAddNodes + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 + bench_expand_test.go:55: AddNodes=43 +BenchmarkAndSelf 1000000 2611 ns/op +--- BENCH: BenchmarkAndSelf + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 + bench_expand_test.go:71: AndSelf=44 +BenchmarkFilter 100000 27571 ns/op +--- BENCH: BenchmarkFilter + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 + bench_filter_test.go:20: Filter=13 +BenchmarkNot 50000 32006 ns/op +--- BENCH: BenchmarkNot + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 + bench_filter_test.go:36: Not=371 +BenchmarkFilterFunction 50000 61388 ns/op +--- BENCH: BenchmarkFilterFunction + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 + bench_filter_test.go:55: FilterFunction=112 +BenchmarkNotFunction 50000 66702 ns/op +--- BENCH: BenchmarkNotFunction + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 + bench_filter_test.go:74: NotFunction=261 +BenchmarkFilterNodes 50000 59699 ns/op +--- BENCH: BenchmarkFilterNodes + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 + bench_filter_test.go:92: FilterNodes=2 +BenchmarkNotNodes 50000 73248 ns/op +--- BENCH: BenchmarkNotNodes + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 + bench_filter_test.go:110: NotNodes=360 +BenchmarkFilterSelection 50000 59242 ns/op +--- BENCH: BenchmarkFilterSelection + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 + bench_filter_test.go:127: FilterSelection=2 +BenchmarkNotSelection 50000 73211 ns/op +--- BENCH: BenchmarkNotSelection + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 + bench_filter_test.go:144: NotSelection=360 +BenchmarkHas 5000 395087 ns/op +--- BENCH: BenchmarkHas + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 + bench_filter_test.go:160: Has=13 +BenchmarkHasNodes 10000 215849 ns/op +--- BENCH: BenchmarkHasNodes + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 + bench_filter_test.go:178: HasNodes=15 +BenchmarkHasSelection 10000 215612 ns/op +--- BENCH: BenchmarkHasSelection + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 + bench_filter_test.go:195: HasSelection=15 +BenchmarkEnd 500000000 4.59 ns/op +--- BENCH: BenchmarkEnd + bench_filter_test.go:211: End=373 + 
bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 + bench_filter_test.go:211: End=373 +BenchmarkEach 200000 8588 ns/op +--- BENCH: BenchmarkEach + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 + bench_iteration_test.go:22: Each=59 +BenchmarkMap 200000 14444 ns/op +--- BENCH: BenchmarkMap + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 + bench_iteration_test.go:41: Map=59 +BenchmarkEachWithBreak 1000000 1490 ns/op +--- BENCH: BenchmarkEachWithBreak + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 + bench_iteration_test.go:61: Each=10 +BenchmarkAttr 50000000 30.9 ns/op +--- BENCH: BenchmarkAttr + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading + bench_property_test.go:16: Attr=firstHeading +BenchmarkText 200000 14017 ns/op +BenchmarkLength 2000000000 0.31 ns/op +--- BENCH: BenchmarkLength + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 + bench_property_test.go:37: Length=14 +BenchmarkHtml 5000000 577 ns/op +BenchmarkIs 50000 31936 ns/op +--- BENCH: BenchmarkIs + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true + bench_query_test.go:16: Is=true +BenchmarkIsPositional 100000 23372 ns/op +--- BENCH: BenchmarkIsPositional + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true + bench_query_test.go:28: IsPositional=true +BenchmarkIsFunction 1000000 2170 ns/op +--- BENCH: BenchmarkIsFunction + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true + bench_query_test.go:43: IsFunction=true +BenchmarkIsSelection 50000 59814 ns/op +--- BENCH: BenchmarkIsSelection + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true + bench_query_test.go:56: IsSelection=true +BenchmarkIsNodes 50000 59629 ns/op +--- BENCH: BenchmarkIsNodes + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true + bench_query_test.go:70: IsNodes=true +BenchmarkHasClass 5000 384894 ns/op +--- BENCH: BenchmarkHasClass + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true + bench_query_test.go:82: HasClass=true +BenchmarkContains 100000000 11.4 ns/op +--- BENCH: BenchmarkContains + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true + bench_query_test.go:96: Contains=true +BenchmarkFind 100000 23545 ns/op +--- BENCH: BenchmarkFind + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 + bench_traversal_test.go:18: Find=41 +BenchmarkFindWithinSelection 50000 63775 ns/op +--- BENCH: BenchmarkFindWithinSelection + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 + bench_traversal_test.go:34: FindWithinSelection=39 
+ bench_traversal_test.go:34: FindWithinSelection=39 +BenchmarkFindSelection 5000 441958 ns/op +--- BENCH: BenchmarkFindSelection + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 + bench_traversal_test.go:51: FindSelection=73 +BenchmarkFindNodes 5000 437717 ns/op +--- BENCH: BenchmarkFindNodes + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 + bench_traversal_test.go:69: FindNodes=73 +BenchmarkContents 1000000 2799 ns/op +--- BENCH: BenchmarkContents + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 + bench_traversal_test.go:85: Contents=16 +BenchmarkContentsFiltered 500000 4489 ns/op +--- BENCH: BenchmarkContentsFiltered + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 + bench_traversal_test.go:101: ContentsFiltered=1 +BenchmarkChildren 5000000 546 ns/op +--- BENCH: BenchmarkChildren + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 + bench_traversal_test.go:117: Children=2 +BenchmarkChildrenFiltered 500000 3472 ns/op +--- BENCH: BenchmarkChildrenFiltered + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 + bench_traversal_test.go:133: ChildrenFiltered=2 +BenchmarkParent 50000 39067 ns/op +--- BENCH: BenchmarkParent + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 + bench_traversal_test.go:149: Parent=55 +BenchmarkParentFiltered 50000 41450 ns/op +--- BENCH: BenchmarkParentFiltered + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 + bench_traversal_test.go:165: ParentFiltered=4 +BenchmarkParents 20000 84864 ns/op +--- BENCH: BenchmarkParents + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 + bench_traversal_test.go:181: Parents=73 +BenchmarkParentsFiltered 20000 87823 ns/op +--- BENCH: BenchmarkParentsFiltered + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 + bench_traversal_test.go:197: ParentsFiltered=18 +BenchmarkParentsUntil 50000 65986 ns/op +--- BENCH: BenchmarkParentsUntil + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 + bench_traversal_test.go:213: ParentsUntil=52 +BenchmarkParentsUntilSelection 10000 149798 ns/op +--- BENCH: BenchmarkParentsUntilSelection + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 + bench_traversal_test.go:230: ParentsUntilSelection=70 +BenchmarkParentsUntilNodes 10000 148144 ns/op +--- BENCH: BenchmarkParentsUntilNodes + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 + bench_traversal_test.go:248: ParentsUntilNodes=70 +BenchmarkParentsFilteredUntil 100000 15579 ns/op +--- BENCH: BenchmarkParentsFilteredUntil + bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 + 
bench_traversal_test.go:264: ParentsFilteredUntil=2 + bench_traversal_test.go:264: ParentsFilteredUntil=2 +BenchmarkParentsFilteredUntilSelection 100000 19094 ns/op +--- BENCH: BenchmarkParentsFilteredUntilSelection + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 + bench_traversal_test.go:281: ParentsFilteredUntilSelection=2 +BenchmarkParentsFilteredUntilNodes 100000 19037 ns/op +--- BENCH: BenchmarkParentsFilteredUntilNodes + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 + bench_traversal_test.go:299: ParentsFilteredUntilNodes=2 +BenchmarkSiblings 50000 63891 ns/op +--- BENCH: BenchmarkSiblings + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 + bench_traversal_test.go:315: Siblings=293 +BenchmarkSiblingsFiltered 50000 70424 ns/op +--- BENCH: BenchmarkSiblingsFiltered + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 + bench_traversal_test.go:331: SiblingsFiltered=46 +BenchmarkNext 200000 9350 ns/op +--- BENCH: BenchmarkNext + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 + bench_traversal_test.go:347: Next=49 +BenchmarkNextFiltered 200000 10929 ns/op +--- BENCH: BenchmarkNextFiltered + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 + bench_traversal_test.go:363: NextFiltered=6 +BenchmarkNextAll 50000 43398 ns/op +--- BENCH: BenchmarkNextAll + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 + bench_traversal_test.go:379: NextAll=234 +BenchmarkNextAllFiltered 50000 48519 ns/op +--- BENCH: BenchmarkNextAllFiltered + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 + bench_traversal_test.go:395: NextAllFiltered=33 +BenchmarkPrev 200000 9181 ns/op +--- BENCH: BenchmarkPrev + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 + bench_traversal_test.go:411: Prev=49 +BenchmarkPrevFiltered 200000 10811 ns/op +--- BENCH: BenchmarkPrevFiltered + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 + bench_traversal_test.go:429: PrevFiltered=7 +BenchmarkPrevAll 100000 15589 ns/op +--- BENCH: BenchmarkPrevAll + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 + bench_traversal_test.go:445: PrevAll=78 +BenchmarkPrevAllFiltered 100000 17341 ns/op +--- BENCH: BenchmarkPrevAllFiltered + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 + bench_traversal_test.go:461: PrevAllFiltered=6 +BenchmarkNextUntil 20000 80663 ns/op +--- BENCH: BenchmarkNextUntil + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: NextUntil=84 + bench_traversal_test.go:477: 
NextUntil=84 +BenchmarkNextUntilSelection 50000 56496 ns/op +--- BENCH: BenchmarkNextUntilSelection + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 + bench_traversal_test.go:494: NextUntilSelection=42 +BenchmarkNextUntilNodes 100000 23729 ns/op +--- BENCH: BenchmarkNextUntilNodes + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 + bench_traversal_test.go:512: NextUntilNodes=12 +BenchmarkPrevUntil 10000 208267 ns/op +--- BENCH: BenchmarkPrevUntil + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 + bench_traversal_test.go:528: PrevUntil=238 +BenchmarkPrevUntilSelection 50000 72119 ns/op +--- BENCH: BenchmarkPrevUntilSelection + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 + bench_traversal_test.go:545: PrevUntilSelection=49 +BenchmarkPrevUntilNodes 100000 18549 ns/op +--- BENCH: BenchmarkPrevUntilNodes + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 + bench_traversal_test.go:563: PrevUntilNodes=11 +BenchmarkNextFilteredUntil 50000 42339 ns/op +--- BENCH: BenchmarkNextFilteredUntil + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 + bench_traversal_test.go:579: NextFilteredUntil=22 +BenchmarkNextFilteredUntilSelection 50000 61916 ns/op +--- BENCH: BenchmarkNextFilteredUntilSelection + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 + bench_traversal_test.go:596: NextFilteredUntilSelection=22 +BenchmarkNextFilteredUntilNodes 50000 62139 ns/op +--- BENCH: BenchmarkNextFilteredUntilNodes + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 + bench_traversal_test.go:614: NextFilteredUntilNodes=22 +BenchmarkPrevFilteredUntil 50000 43409 ns/op +--- BENCH: BenchmarkPrevFilteredUntil + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 + bench_traversal_test.go:630: PrevFilteredUntil=20 +BenchmarkPrevFilteredUntilSelection 50000 63768 ns/op +--- BENCH: BenchmarkPrevFilteredUntilSelection + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 + bench_traversal_test.go:647: PrevFilteredUntilSelection=20 +BenchmarkPrevFilteredUntilNodes 50000 63543 ns/op +--- BENCH: BenchmarkPrevFilteredUntilNodes + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 + bench_traversal_test.go:665: PrevFilteredUntilNodes=20 +BenchmarkClosest 500000 5110 ns/op +--- BENCH: BenchmarkClosest + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 + bench_traversal_test.go:681: Closest=2 +BenchmarkClosestSelection 
5000000 629 ns/op +--- BENCH: BenchmarkClosestSelection + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 + bench_traversal_test.go:698: ClosestSelection=2 +BenchmarkClosestNodes 5000000 627 ns/op +--- BENCH: BenchmarkClosestNodes + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 + bench_traversal_test.go:715: ClosestNodes=2 +ok github.com/PuerkitoBio/goquery 215.785s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.0-go1.7 b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.0-go1.7 new file mode 100644 index 00000000..4bc3a1fa --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.0-go1.7 @@ -0,0 +1,85 @@ +BenchmarkFirst-4 30000000 50.7 ns/op 48 B/op 1 allocs/op +BenchmarkLast-4 30000000 50.9 ns/op 48 B/op 1 allocs/op +BenchmarkEq-4 30000000 55.7 ns/op 48 B/op 1 allocs/op +BenchmarkSlice-4 500000000 3.45 ns/op 0 B/op 0 allocs/op +BenchmarkGet-4 2000000000 1.68 ns/op 0 B/op 0 allocs/op +BenchmarkIndex-4 3000000 541 ns/op 248 B/op 10 allocs/op +BenchmarkIndexSelector-4 200000 10749 ns/op 2464 B/op 17 allocs/op +BenchmarkIndexOfNode-4 200000000 6.47 ns/op 0 B/op 0 allocs/op +BenchmarkIndexOfSelection-4 200000000 7.27 ns/op 0 B/op 0 allocs/op +BenchmarkMetalReviewExample-4 10000 138426 ns/op 12240 B/op 319 allocs/op +BenchmarkAdd-4 200000 10192 ns/op 208 B/op 9 allocs/op +BenchmarkAddSelection-4 10000000 158 ns/op 48 B/op 1 allocs/op +BenchmarkAddNodes-4 10000000 156 ns/op 48 B/op 1 allocs/op +BenchmarkAndSelf-4 1000000 1588 ns/op 1008 B/op 5 allocs/op +BenchmarkFilter-4 100000 20427 ns/op 360 B/op 8 allocs/op +BenchmarkNot-4 100000 23508 ns/op 136 B/op 5 allocs/op +BenchmarkFilterFunction-4 50000 34178 ns/op 22976 B/op 755 allocs/op +BenchmarkNotFunction-4 50000 38173 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterNodes-4 50000 34001 ns/op 20960 B/op 749 allocs/op +BenchmarkNotNodes-4 30000 40344 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterSelection-4 50000 33308 ns/op 20960 B/op 749 allocs/op +BenchmarkNotSelection-4 30000 40748 ns/op 29120 B/op 757 allocs/op +BenchmarkHas-4 5000 263346 ns/op 1816 B/op 48 allocs/op +BenchmarkHasNodes-4 10000 160840 ns/op 21184 B/op 752 allocs/op +BenchmarkHasSelection-4 10000 165410 ns/op 21184 B/op 752 allocs/op +BenchmarkEnd-4 2000000000 1.01 ns/op 0 B/op 0 allocs/op +BenchmarkEach-4 300000 4664 ns/op 3304 B/op 118 allocs/op +BenchmarkMap-4 200000 8286 ns/op 5572 B/op 184 allocs/op +BenchmarkEachWithBreak-4 2000000 806 ns/op 560 B/op 20 allocs/op +BenchmarkAttr-4 100000000 21.6 ns/op 0 B/op 0 allocs/op +BenchmarkText-4 200000 8909 ns/op 7536 B/op 110 allocs/op +BenchmarkLength-4 2000000000 0.34 ns/op 0 B/op 0 allocs/op +BenchmarkHtml-4 3000000 422 ns/op 120 B/op 2 allocs/op +BenchmarkIs-4 100000 22615 ns/op 88 B/op 4 allocs/op +BenchmarkIsPositional-4 50000 26655 ns/op 1112 B/op 10 allocs/op +BenchmarkIsFunction-4 1000000 1208 ns/op 784 B/op 28 allocs/op +BenchmarkIsSelection-4 50000 33497 ns/op 20960 B/op 749 allocs/op +BenchmarkIsNodes-4 50000 33572 ns/op 20960 B/op 749 allocs/op +BenchmarkHasClass-4 10000 232802 ns/op 14944 B/op 976 allocs/op +BenchmarkContains-4 200000000 7.33 ns/op 0 B/op 0 allocs/op +BenchmarkFind-4 200000 10715 ns/op 2464 B/op 17 allocs/op +BenchmarkFindWithinSelection-4 50000 35878 ns/op 2176 B/op 78 allocs/op 
+BenchmarkFindSelection-4 10000 194356 ns/op 2672 B/op 82 allocs/op +BenchmarkFindNodes-4 10000 195510 ns/op 2672 B/op 82 allocs/op +BenchmarkContents-4 1000000 2252 ns/op 864 B/op 34 allocs/op +BenchmarkContentsFiltered-4 500000 3015 ns/op 1016 B/op 39 allocs/op +BenchmarkChildren-4 5000000 364 ns/op 152 B/op 7 allocs/op +BenchmarkChildrenFiltered-4 1000000 2212 ns/op 352 B/op 15 allocs/op +BenchmarkParent-4 50000 24643 ns/op 4048 B/op 381 allocs/op +BenchmarkParentFiltered-4 50000 25967 ns/op 4248 B/op 388 allocs/op +BenchmarkParents-4 30000 50000 ns/op 27776 B/op 830 allocs/op +BenchmarkParentsFiltered-4 30000 53107 ns/op 28360 B/op 838 allocs/op +BenchmarkParentsUntil-4 100000 22423 ns/op 10352 B/op 353 allocs/op +BenchmarkParentsUntilSelection-4 20000 86925 ns/op 51144 B/op 1516 allocs/op +BenchmarkParentsUntilNodes-4 20000 87597 ns/op 51144 B/op 1516 allocs/op +BenchmarkParentsFilteredUntil-4 300000 5568 ns/op 2232 B/op 86 allocs/op +BenchmarkParentsFilteredUntilSelection-4 200000 10966 ns/op 5440 B/op 190 allocs/op +BenchmarkParentsFilteredUntilNodes-4 200000 10919 ns/op 5440 B/op 190 allocs/op +BenchmarkSiblings-4 30000 46018 ns/op 15400 B/op 204 allocs/op +BenchmarkSiblingsFiltered-4 30000 50566 ns/op 16496 B/op 213 allocs/op +BenchmarkNext-4 200000 7921 ns/op 3216 B/op 112 allocs/op +BenchmarkNextFiltered-4 200000 8804 ns/op 3416 B/op 118 allocs/op +BenchmarkNextAll-4 50000 31098 ns/op 9912 B/op 138 allocs/op +BenchmarkNextAllFiltered-4 50000 34677 ns/op 11008 B/op 147 allocs/op +BenchmarkPrev-4 200000 7920 ns/op 3216 B/op 112 allocs/op +BenchmarkPrevFiltered-4 200000 8913 ns/op 3416 B/op 118 allocs/op +BenchmarkPrevAll-4 200000 10845 ns/op 4376 B/op 113 allocs/op +BenchmarkPrevAllFiltered-4 100000 12030 ns/op 4576 B/op 119 allocs/op +BenchmarkNextUntil-4 100000 19193 ns/op 5760 B/op 260 allocs/op +BenchmarkNextUntilSelection-4 50000 34829 ns/op 18480 B/op 542 allocs/op +BenchmarkNextUntilNodes-4 100000 14459 ns/op 7944 B/op 248 allocs/op +BenchmarkPrevUntil-4 20000 66296 ns/op 12856 B/op 448 allocs/op +BenchmarkPrevUntilSelection-4 30000 45037 ns/op 23432 B/op 689 allocs/op +BenchmarkPrevUntilNodes-4 200000 11525 ns/op 6152 B/op 203 allocs/op +BenchmarkNextFilteredUntil-4 100000 12940 ns/op 4512 B/op 173 allocs/op +BenchmarkNextFilteredUntilSelection-4 50000 38924 ns/op 19160 B/op 567 allocs/op +BenchmarkNextFilteredUntilNodes-4 50000 38528 ns/op 19160 B/op 567 allocs/op +BenchmarkPrevFilteredUntil-4 100000 12980 ns/op 4664 B/op 175 allocs/op +BenchmarkPrevFilteredUntilSelection-4 50000 39671 ns/op 19936 B/op 587 allocs/op +BenchmarkPrevFilteredUntilNodes-4 50000 39484 ns/op 19936 B/op 587 allocs/op +BenchmarkClosest-4 500000 3310 ns/op 160 B/op 8 allocs/op +BenchmarkClosestSelection-4 5000000 361 ns/op 96 B/op 6 allocs/op +BenchmarkClosestNodes-4 5000000 359 ns/op 96 B/op 6 allocs/op +PASS +ok github.com/PuerkitoBio/goquery 163.718s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1a-go1.7 b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1a-go1.7 new file mode 100644 index 00000000..0831aac3 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1a-go1.7 @@ -0,0 +1,85 @@ +BenchmarkFirst-4 30000000 50.9 ns/op 48 B/op 1 allocs/op +BenchmarkLast-4 30000000 50.0 ns/op 48 B/op 1 allocs/op +BenchmarkEq-4 30000000 50.5 ns/op 48 B/op 1 allocs/op +BenchmarkSlice-4 500000000 3.53 ns/op 0 B/op 0 allocs/op +BenchmarkGet-4 2000000000 1.66 ns/op 0 B/op 0 allocs/op +BenchmarkIndex-4 2000000 832 ns/op 248 B/op 10 allocs/op +BenchmarkIndexSelector-4 100000 16073 
ns/op 3839 B/op 21 allocs/op +BenchmarkIndexOfNode-4 200000000 6.38 ns/op 0 B/op 0 allocs/op +BenchmarkIndexOfSelection-4 200000000 7.14 ns/op 0 B/op 0 allocs/op +BenchmarkMetalReviewExample-4 10000 140737 ns/op 12418 B/op 320 allocs/op +BenchmarkAdd-4 100000 13162 ns/op 974 B/op 10 allocs/op +BenchmarkAddSelection-4 500000 3160 ns/op 814 B/op 2 allocs/op +BenchmarkAddNodes-4 500000 3159 ns/op 814 B/op 2 allocs/op +BenchmarkAndSelf-4 200000 7423 ns/op 2404 B/op 9 allocs/op +BenchmarkFilter-4 100000 19671 ns/op 360 B/op 8 allocs/op +BenchmarkNot-4 100000 22577 ns/op 136 B/op 5 allocs/op +BenchmarkFilterFunction-4 50000 33960 ns/op 22976 B/op 755 allocs/op +BenchmarkNotFunction-4 50000 37909 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterNodes-4 50000 34196 ns/op 20960 B/op 749 allocs/op +BenchmarkNotNodes-4 30000 40446 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterSelection-4 50000 33091 ns/op 20960 B/op 749 allocs/op +BenchmarkNotSelection-4 30000 40609 ns/op 29120 B/op 757 allocs/op +BenchmarkHas-4 5000 262936 ns/op 2371 B/op 50 allocs/op +BenchmarkHasNodes-4 10000 148631 ns/op 21184 B/op 752 allocs/op +BenchmarkHasSelection-4 10000 153117 ns/op 21184 B/op 752 allocs/op +BenchmarkEnd-4 2000000000 1.02 ns/op 0 B/op 0 allocs/op +BenchmarkEach-4 300000 4653 ns/op 3304 B/op 118 allocs/op +BenchmarkMap-4 200000 8257 ns/op 5572 B/op 184 allocs/op +BenchmarkEachWithBreak-4 2000000 806 ns/op 560 B/op 20 allocs/op +BenchmarkAttr-4 100000000 22.0 ns/op 0 B/op 0 allocs/op +BenchmarkText-4 200000 8913 ns/op 7536 B/op 110 allocs/op +BenchmarkLength-4 2000000000 0.35 ns/op 0 B/op 0 allocs/op +BenchmarkHtml-4 5000000 398 ns/op 120 B/op 2 allocs/op +BenchmarkIs-4 100000 22392 ns/op 88 B/op 4 allocs/op +BenchmarkIsPositional-4 50000 26259 ns/op 1112 B/op 10 allocs/op +BenchmarkIsFunction-4 1000000 1212 ns/op 784 B/op 28 allocs/op +BenchmarkIsSelection-4 50000 33222 ns/op 20960 B/op 749 allocs/op +BenchmarkIsNodes-4 50000 33408 ns/op 20960 B/op 749 allocs/op +BenchmarkHasClass-4 10000 233208 ns/op 14944 B/op 976 allocs/op +BenchmarkContains-4 200000000 7.57 ns/op 0 B/op 0 allocs/op +BenchmarkFind-4 100000 16121 ns/op 3839 B/op 21 allocs/op +BenchmarkFindWithinSelection-4 20000 68019 ns/op 11521 B/op 97 allocs/op +BenchmarkFindSelection-4 5000 387582 ns/op 59787 B/op 176 allocs/op +BenchmarkFindNodes-4 5000 389246 ns/op 59797 B/op 176 allocs/op +BenchmarkContents-4 200000 11475 ns/op 2878 B/op 42 allocs/op +BenchmarkContentsFiltered-4 200000 11222 ns/op 2498 B/op 46 allocs/op +BenchmarkChildren-4 2000000 650 ns/op 152 B/op 7 allocs/op +BenchmarkChildrenFiltered-4 500000 2568 ns/op 352 B/op 15 allocs/op +BenchmarkParent-4 2000 702513 ns/op 194478 B/op 828 allocs/op +BenchmarkParentFiltered-4 2000 690778 ns/op 194658 B/op 835 allocs/op +BenchmarkParents-4 10000 124855 ns/op 49869 B/op 868 allocs/op +BenchmarkParentsFiltered-4 10000 128535 ns/op 50456 B/op 876 allocs/op +BenchmarkParentsUntil-4 20000 72982 ns/op 23802 B/op 388 allocs/op +BenchmarkParentsUntilSelection-4 10000 156099 ns/op 72453 B/op 1549 allocs/op +BenchmarkParentsUntilNodes-4 10000 156610 ns/op 72455 B/op 1549 allocs/op +BenchmarkParentsFilteredUntil-4 100000 15549 ns/op 4068 B/op 94 allocs/op +BenchmarkParentsFilteredUntilSelection-4 100000 20564 ns/op 7276 B/op 198 allocs/op +BenchmarkParentsFilteredUntilNodes-4 100000 20635 ns/op 7276 B/op 198 allocs/op +BenchmarkSiblings-4 3000 565114 ns/op 205910 B/op 336 allocs/op +BenchmarkSiblingsFiltered-4 3000 580264 ns/op 206993 B/op 345 allocs/op +BenchmarkNext-4 20000 93177 ns/op 26810 B/op 169 
allocs/op +BenchmarkNextFiltered-4 20000 94171 ns/op 27013 B/op 175 allocs/op +BenchmarkNextAll-4 5000 270320 ns/op 89289 B/op 237 allocs/op +BenchmarkNextAllFiltered-4 5000 275283 ns/op 90375 B/op 246 allocs/op +BenchmarkPrev-4 20000 92777 ns/op 26810 B/op 169 allocs/op +BenchmarkPrevFiltered-4 20000 95577 ns/op 27007 B/op 175 allocs/op +BenchmarkPrevAll-4 20000 86339 ns/op 27515 B/op 151 allocs/op +BenchmarkPrevAllFiltered-4 20000 87759 ns/op 27715 B/op 157 allocs/op +BenchmarkNextUntil-4 10000 163930 ns/op 48541 B/op 330 allocs/op +BenchmarkNextUntilSelection-4 30000 56382 ns/op 23880 B/op 556 allocs/op +BenchmarkNextUntilNodes-4 100000 18883 ns/op 8703 B/op 252 allocs/op +BenchmarkPrevUntil-4 3000 484668 ns/op 145402 B/op 611 allocs/op +BenchmarkPrevUntilSelection-4 20000 72125 ns/op 28865 B/op 705 allocs/op +BenchmarkPrevUntilNodes-4 100000 14722 ns/op 6510 B/op 205 allocs/op +BenchmarkNextFilteredUntil-4 50000 39006 ns/op 10990 B/op 192 allocs/op +BenchmarkNextFilteredUntilSelection-4 20000 66048 ns/op 25641 B/op 586 allocs/op +BenchmarkNextFilteredUntilNodes-4 20000 65314 ns/op 25640 B/op 586 allocs/op +BenchmarkPrevFilteredUntil-4 50000 33312 ns/op 9709 B/op 189 allocs/op +BenchmarkPrevFilteredUntilSelection-4 20000 64197 ns/op 24981 B/op 601 allocs/op +BenchmarkPrevFilteredUntilNodes-4 20000 64505 ns/op 24982 B/op 601 allocs/op +BenchmarkClosest-4 500000 4065 ns/op 160 B/op 8 allocs/op +BenchmarkClosestSelection-4 2000000 756 ns/op 96 B/op 6 allocs/op +BenchmarkClosestNodes-4 2000000 753 ns/op 96 B/op 6 allocs/op +PASS +ok github.com/PuerkitoBio/goquery 162.053s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1b-go1.7 b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1b-go1.7 new file mode 100644 index 00000000..e8a9e512 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1b-go1.7 @@ -0,0 +1,85 @@ +BenchmarkFirst-4 30000000 51.8 ns/op 48 B/op 1 allocs/op +BenchmarkLast-4 30000000 50.1 ns/op 48 B/op 1 allocs/op +BenchmarkEq-4 30000000 51.4 ns/op 48 B/op 1 allocs/op +BenchmarkSlice-4 500000000 3.52 ns/op 0 B/op 0 allocs/op +BenchmarkGet-4 2000000000 1.65 ns/op 0 B/op 0 allocs/op +BenchmarkIndex-4 2000000 787 ns/op 248 B/op 10 allocs/op +BenchmarkIndexSelector-4 100000 16952 ns/op 3839 B/op 21 allocs/op +BenchmarkIndexOfNode-4 200000000 6.42 ns/op 0 B/op 0 allocs/op +BenchmarkIndexOfSelection-4 200000000 7.12 ns/op 0 B/op 0 allocs/op +BenchmarkMetalReviewExample-4 10000 141994 ns/op 12418 B/op 320 allocs/op +BenchmarkAdd-4 200000 10367 ns/op 208 B/op 9 allocs/op +BenchmarkAddSelection-4 10000000 152 ns/op 48 B/op 1 allocs/op +BenchmarkAddNodes-4 10000000 147 ns/op 48 B/op 1 allocs/op +BenchmarkAndSelf-4 1000000 1647 ns/op 1008 B/op 5 allocs/op +BenchmarkFilter-4 100000 19522 ns/op 360 B/op 8 allocs/op +BenchmarkNot-4 100000 22546 ns/op 136 B/op 5 allocs/op +BenchmarkFilterFunction-4 50000 35087 ns/op 22976 B/op 755 allocs/op +BenchmarkNotFunction-4 50000 39123 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterNodes-4 50000 34890 ns/op 20960 B/op 749 allocs/op +BenchmarkNotNodes-4 30000 41145 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterSelection-4 50000 33735 ns/op 20960 B/op 749 allocs/op +BenchmarkNotSelection-4 30000 41334 ns/op 29120 B/op 757 allocs/op +BenchmarkHas-4 5000 264058 ns/op 2370 B/op 50 allocs/op +BenchmarkHasNodes-4 10000 151718 ns/op 21184 B/op 752 allocs/op +BenchmarkHasSelection-4 10000 156955 ns/op 21184 B/op 752 allocs/op +BenchmarkEnd-4 2000000000 1.01 ns/op 0 B/op 0 allocs/op +BenchmarkEach-4 300000 4660 ns/op 3304 B/op 118 
allocs/op +BenchmarkMap-4 200000 8404 ns/op 5572 B/op 184 allocs/op +BenchmarkEachWithBreak-4 2000000 806 ns/op 560 B/op 20 allocs/op +BenchmarkAttr-4 100000000 21.6 ns/op 0 B/op 0 allocs/op +BenchmarkText-4 200000 8911 ns/op 7536 B/op 110 allocs/op +BenchmarkLength-4 2000000000 0.34 ns/op 0 B/op 0 allocs/op +BenchmarkHtml-4 3000000 405 ns/op 120 B/op 2 allocs/op +BenchmarkIs-4 100000 22228 ns/op 88 B/op 4 allocs/op +BenchmarkIsPositional-4 50000 26469 ns/op 1112 B/op 10 allocs/op +BenchmarkIsFunction-4 1000000 1240 ns/op 784 B/op 28 allocs/op +BenchmarkIsSelection-4 50000 33709 ns/op 20960 B/op 749 allocs/op +BenchmarkIsNodes-4 50000 33711 ns/op 20960 B/op 749 allocs/op +BenchmarkHasClass-4 10000 236005 ns/op 14944 B/op 976 allocs/op +BenchmarkContains-4 200000000 7.47 ns/op 0 B/op 0 allocs/op +BenchmarkFind-4 100000 16075 ns/op 3839 B/op 21 allocs/op +BenchmarkFindWithinSelection-4 30000 41418 ns/op 3539 B/op 82 allocs/op +BenchmarkFindSelection-4 10000 209490 ns/op 5616 B/op 89 allocs/op +BenchmarkFindNodes-4 10000 208206 ns/op 5614 B/op 89 allocs/op +BenchmarkContents-4 300000 4751 ns/op 1420 B/op 36 allocs/op +BenchmarkContentsFiltered-4 300000 5454 ns/op 1570 B/op 41 allocs/op +BenchmarkChildren-4 3000000 527 ns/op 152 B/op 7 allocs/op +BenchmarkChildrenFiltered-4 1000000 2484 ns/op 352 B/op 15 allocs/op +BenchmarkParent-4 50000 34724 ns/op 6940 B/op 387 allocs/op +BenchmarkParentFiltered-4 50000 35596 ns/op 7141 B/op 394 allocs/op +BenchmarkParents-4 20000 62094 ns/op 30720 B/op 837 allocs/op +BenchmarkParentsFiltered-4 20000 63223 ns/op 31304 B/op 845 allocs/op +BenchmarkParentsUntil-4 50000 30391 ns/op 11828 B/op 358 allocs/op +BenchmarkParentsUntilSelection-4 20000 99962 ns/op 54075 B/op 1523 allocs/op +BenchmarkParentsUntilNodes-4 20000 98763 ns/op 54073 B/op 1523 allocs/op +BenchmarkParentsFilteredUntil-4 200000 7982 ns/op 2787 B/op 88 allocs/op +BenchmarkParentsFilteredUntilSelection-4 100000 13618 ns/op 5995 B/op 192 allocs/op +BenchmarkParentsFilteredUntilNodes-4 100000 13639 ns/op 5994 B/op 192 allocs/op +BenchmarkSiblings-4 20000 75287 ns/op 28453 B/op 225 allocs/op +BenchmarkSiblingsFiltered-4 20000 80139 ns/op 29543 B/op 234 allocs/op +BenchmarkNext-4 100000 14270 ns/op 4659 B/op 117 allocs/op +BenchmarkNextFiltered-4 100000 15352 ns/op 4860 B/op 123 allocs/op +BenchmarkNextAll-4 20000 60811 ns/op 22771 B/op 157 allocs/op +BenchmarkNextAllFiltered-4 20000 69079 ns/op 23871 B/op 166 allocs/op +BenchmarkPrev-4 100000 14417 ns/op 4659 B/op 117 allocs/op +BenchmarkPrevFiltered-4 100000 15443 ns/op 4859 B/op 123 allocs/op +BenchmarkPrevAll-4 100000 22008 ns/op 7346 B/op 120 allocs/op +BenchmarkPrevAllFiltered-4 100000 23212 ns/op 7544 B/op 126 allocs/op +BenchmarkNextUntil-4 50000 30589 ns/op 8767 B/op 267 allocs/op +BenchmarkNextUntilSelection-4 30000 40875 ns/op 19862 B/op 546 allocs/op +BenchmarkNextUntilNodes-4 100000 15987 ns/op 8134 B/op 249 allocs/op +BenchmarkPrevUntil-4 20000 98799 ns/op 25727 B/op 467 allocs/op +BenchmarkPrevUntilSelection-4 30000 51874 ns/op 24875 B/op 694 allocs/op +BenchmarkPrevUntilNodes-4 100000 12901 ns/op 6334 B/op 204 allocs/op +BenchmarkNextFilteredUntil-4 100000 19869 ns/op 5909 B/op 177 allocs/op +BenchmarkNextFilteredUntilSelection-4 30000 45412 ns/op 20557 B/op 571 allocs/op +BenchmarkNextFilteredUntilNodes-4 30000 45363 ns/op 20557 B/op 571 allocs/op +BenchmarkPrevFilteredUntil-4 100000 19357 ns/op 6033 B/op 179 allocs/op +BenchmarkPrevFilteredUntilSelection-4 30000 46396 ns/op 21305 B/op 591 allocs/op 
+BenchmarkPrevFilteredUntilNodes-4 30000 46133 ns/op 21305 B/op 591 allocs/op +BenchmarkClosest-4 500000 3448 ns/op 160 B/op 8 allocs/op +BenchmarkClosestSelection-4 3000000 528 ns/op 96 B/op 6 allocs/op +BenchmarkClosestNodes-4 3000000 523 ns/op 96 B/op 6 allocs/op +PASS +ok github.com/PuerkitoBio/goquery 162.012s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1c-go1.7 b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1c-go1.7 new file mode 100644 index 00000000..e522e5a2 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench/v1.0.1c-go1.7 @@ -0,0 +1,86 @@ +BenchmarkFirst-4 30000000 51.7 ns/op 48 B/op 1 allocs/op +BenchmarkLast-4 30000000 51.9 ns/op 48 B/op 1 allocs/op +BenchmarkEq-4 30000000 50.0 ns/op 48 B/op 1 allocs/op +BenchmarkSlice-4 500000000 3.47 ns/op 0 B/op 0 allocs/op +BenchmarkGet-4 2000000000 1.68 ns/op 0 B/op 0 allocs/op +BenchmarkIndex-4 2000000 804 ns/op 248 B/op 10 allocs/op +BenchmarkIndexSelector-4 100000 16285 ns/op 3839 B/op 21 allocs/op +BenchmarkIndexOfNode-4 200000000 6.50 ns/op 0 B/op 0 allocs/op +BenchmarkIndexOfSelection-4 200000000 7.02 ns/op 0 B/op 0 allocs/op +BenchmarkMetalReviewExample-4 10000 143160 ns/op 12417 B/op 320 allocs/op +BenchmarkAdd-4 200000 10326 ns/op 208 B/op 9 allocs/op +BenchmarkAddSelection-4 10000000 155 ns/op 48 B/op 1 allocs/op +BenchmarkAddNodes-4 10000000 156 ns/op 48 B/op 1 allocs/op +BenchmarkAddNodesBig-4 20000 94439 ns/op 21847 B/op 37 allocs/op +BenchmarkAndSelf-4 1000000 1791 ns/op 1008 B/op 5 allocs/op +BenchmarkFilter-4 100000 19470 ns/op 360 B/op 8 allocs/op +BenchmarkNot-4 100000 22500 ns/op 136 B/op 5 allocs/op +BenchmarkFilterFunction-4 50000 34578 ns/op 22976 B/op 755 allocs/op +BenchmarkNotFunction-4 50000 38703 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterNodes-4 50000 34486 ns/op 20960 B/op 749 allocs/op +BenchmarkNotNodes-4 30000 41094 ns/op 29120 B/op 757 allocs/op +BenchmarkFilterSelection-4 50000 33623 ns/op 20960 B/op 749 allocs/op +BenchmarkNotSelection-4 30000 41483 ns/op 29120 B/op 757 allocs/op +BenchmarkHas-4 5000 266628 ns/op 2371 B/op 50 allocs/op +BenchmarkHasNodes-4 10000 152617 ns/op 21184 B/op 752 allocs/op +BenchmarkHasSelection-4 10000 156682 ns/op 21184 B/op 752 allocs/op +BenchmarkEnd-4 2000000000 1.00 ns/op 0 B/op 0 allocs/op +BenchmarkEach-4 300000 4712 ns/op 3304 B/op 118 allocs/op +BenchmarkMap-4 200000 8434 ns/op 5572 B/op 184 allocs/op +BenchmarkEachWithBreak-4 2000000 819 ns/op 560 B/op 20 allocs/op +BenchmarkAttr-4 100000000 21.7 ns/op 0 B/op 0 allocs/op +BenchmarkText-4 200000 9376 ns/op 7536 B/op 110 allocs/op +BenchmarkLength-4 2000000000 0.35 ns/op 0 B/op 0 allocs/op +BenchmarkHtml-4 5000000 401 ns/op 120 B/op 2 allocs/op +BenchmarkIs-4 100000 22214 ns/op 88 B/op 4 allocs/op +BenchmarkIsPositional-4 50000 26559 ns/op 1112 B/op 10 allocs/op +BenchmarkIsFunction-4 1000000 1228 ns/op 784 B/op 28 allocs/op +BenchmarkIsSelection-4 50000 33471 ns/op 20960 B/op 749 allocs/op +BenchmarkIsNodes-4 50000 34461 ns/op 20960 B/op 749 allocs/op +BenchmarkHasClass-4 10000 232429 ns/op 14944 B/op 976 allocs/op +BenchmarkContains-4 200000000 7.62 ns/op 0 B/op 0 allocs/op +BenchmarkFind-4 100000 16114 ns/op 3839 B/op 21 allocs/op +BenchmarkFindWithinSelection-4 30000 42520 ns/op 3540 B/op 82 allocs/op +BenchmarkFindSelection-4 10000 209801 ns/op 5615 B/op 89 allocs/op +BenchmarkFindNodes-4 10000 209082 ns/op 5614 B/op 89 allocs/op +BenchmarkContents-4 300000 4836 ns/op 1420 B/op 36 allocs/op +BenchmarkContentsFiltered-4 200000 5495 ns/op 1570 B/op 41 allocs/op 
+BenchmarkChildren-4 3000000 527 ns/op 152 B/op 7 allocs/op +BenchmarkChildrenFiltered-4 500000 2499 ns/op 352 B/op 15 allocs/op +BenchmarkParent-4 50000 34072 ns/op 6942 B/op 387 allocs/op +BenchmarkParentFiltered-4 50000 36077 ns/op 7141 B/op 394 allocs/op +BenchmarkParents-4 20000 64118 ns/op 30719 B/op 837 allocs/op +BenchmarkParentsFiltered-4 20000 63432 ns/op 31303 B/op 845 allocs/op +BenchmarkParentsUntil-4 50000 29589 ns/op 11829 B/op 358 allocs/op +BenchmarkParentsUntilSelection-4 10000 101033 ns/op 54076 B/op 1523 allocs/op +BenchmarkParentsUntilNodes-4 10000 100584 ns/op 54076 B/op 1523 allocs/op +BenchmarkParentsFilteredUntil-4 200000 8061 ns/op 2787 B/op 88 allocs/op +BenchmarkParentsFilteredUntilSelection-4 100000 13848 ns/op 5995 B/op 192 allocs/op +BenchmarkParentsFilteredUntilNodes-4 100000 13766 ns/op 5995 B/op 192 allocs/op +BenchmarkSiblings-4 20000 75135 ns/op 28453 B/op 225 allocs/op +BenchmarkSiblingsFiltered-4 20000 80532 ns/op 29544 B/op 234 allocs/op +BenchmarkNext-4 100000 14200 ns/op 4660 B/op 117 allocs/op +BenchmarkNextFiltered-4 100000 15284 ns/op 4859 B/op 123 allocs/op +BenchmarkNextAll-4 20000 60889 ns/op 22774 B/op 157 allocs/op +BenchmarkNextAllFiltered-4 20000 65125 ns/op 23869 B/op 166 allocs/op +BenchmarkPrev-4 100000 14448 ns/op 4659 B/op 117 allocs/op +BenchmarkPrevFiltered-4 100000 15444 ns/op 4859 B/op 123 allocs/op +BenchmarkPrevAll-4 100000 22019 ns/op 7344 B/op 120 allocs/op +BenchmarkPrevAllFiltered-4 100000 23307 ns/op 7545 B/op 126 allocs/op +BenchmarkNextUntil-4 50000 30287 ns/op 8766 B/op 267 allocs/op +BenchmarkNextUntilSelection-4 30000 41476 ns/op 19862 B/op 546 allocs/op +BenchmarkNextUntilNodes-4 100000 16106 ns/op 8133 B/op 249 allocs/op +BenchmarkPrevUntil-4 20000 98951 ns/op 25728 B/op 467 allocs/op +BenchmarkPrevUntilSelection-4 30000 52390 ns/op 24875 B/op 694 allocs/op +BenchmarkPrevUntilNodes-4 100000 12986 ns/op 6334 B/op 204 allocs/op +BenchmarkNextFilteredUntil-4 100000 19365 ns/op 5908 B/op 177 allocs/op +BenchmarkNextFilteredUntilSelection-4 30000 45334 ns/op 20555 B/op 571 allocs/op +BenchmarkNextFilteredUntilNodes-4 30000 45292 ns/op 20556 B/op 571 allocs/op +BenchmarkPrevFilteredUntil-4 100000 19412 ns/op 6032 B/op 179 allocs/op +BenchmarkPrevFilteredUntilSelection-4 30000 46286 ns/op 21304 B/op 591 allocs/op +BenchmarkPrevFilteredUntilNodes-4 30000 46554 ns/op 21305 B/op 591 allocs/op +BenchmarkClosest-4 500000 3480 ns/op 160 B/op 8 allocs/op +BenchmarkClosestSelection-4 2000000 722 ns/op 96 B/op 6 allocs/op +BenchmarkClosestNodes-4 2000000 719 ns/op 96 B/op 6 allocs/op +PASS +ok github.com/PuerkitoBio/goquery 160.565s diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go new file mode 100644 index 00000000..29c7e205 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go @@ -0,0 +1,120 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkFirst(b *testing.B) { + b.StopTimer() + sel := DocB().Find("dd") + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.First() + } +} + +func BenchmarkLast(b *testing.B) { + b.StopTimer() + sel := DocB().Find("dd") + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Last() + } +} + +func BenchmarkEq(b *testing.B) { + b.StopTimer() + sel := DocB().Find("dd") + j := 0 + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Eq(j) + if j++; j >= sel.Length() { + j = 0 + } + } +} + +func BenchmarkSlice(b *testing.B) { + b.StopTimer() + sel := DocB().Find("dd") + j := 0 + b.StartTimer() + 
for i := 0; i < b.N; i++ { + sel.Slice(j, j+4) + if j++; j >= (sel.Length() - 4) { + j = 0 + } + } +} + +func BenchmarkGet(b *testing.B) { + b.StopTimer() + sel := DocB().Find("dd") + j := 0 + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Get(j) + if j++; j >= sel.Length() { + j = 0 + } + } +} + +func BenchmarkIndex(b *testing.B) { + var j int + + b.StopTimer() + sel := DocB().Find("#Main") + b.StartTimer() + for i := 0; i < b.N; i++ { + j = sel.Index() + } + if j != 3 { + b.Fatalf("want 3, got %d", j) + } +} + +func BenchmarkIndexSelector(b *testing.B) { + var j int + + b.StopTimer() + sel := DocB().Find("#manual-nav dl dd:nth-child(1)") + b.StartTimer() + for i := 0; i < b.N; i++ { + j = sel.IndexSelector("dd") + } + if j != 4 { + b.Fatalf("want 4, got %d", j) + } +} + +func BenchmarkIndexOfNode(b *testing.B) { + var j int + + b.StopTimer() + sel := DocB().Find("span a") + sel2 := DocB().Find("span a:nth-child(3)") + n := sel2.Get(0) + b.StartTimer() + for i := 0; i < b.N; i++ { + j = sel.IndexOfNode(n) + } + if j != 2 { + b.Fatalf("want 2, got %d", j) + } +} + +func BenchmarkIndexOfSelection(b *testing.B) { + var j int + b.StopTimer() + sel := DocB().Find("span a") + sel2 := DocB().Find("span a:nth-child(3)") + b.StartTimer() + for i := 0; i < b.N; i++ { + j = sel.IndexOfSelection(sel2) + } + if j != 2 { + b.Fatalf("want 2, got %d", j) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go new file mode 100644 index 00000000..ba9ebe5c --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go @@ -0,0 +1,40 @@ +package goquery + +import ( + "bytes" + "fmt" + "strconv" + "testing" +) + +func BenchmarkMetalReviewExample(b *testing.B) { + var n int + var buf bytes.Buffer + + b.StopTimer() + doc := loadDoc("metalreview.html") + b.StartTimer() + for i := 0; i < b.N; i++ { + doc.Find(".slider-row:nth-child(1) .slider-item").Each(func(i int, s *Selection) { + var band, title string + var score float64 + var e error + + n++ + // For each item found, get the band, title and score, and print it + band = s.Find("strong").Text() + title = s.Find("em").Text() + if score, e = strconv.ParseFloat(s.Find(".score").Text(), 64); e != nil { + // Not a valid float, ignore score + if n <= 4 { + buf.WriteString(fmt.Sprintf("Review %d: %s - %s.\n", i, band, title)) + } + } else { + // Print all, including score + if n <= 4 { + buf.WriteString(fmt.Sprintf("Review %d: %s - %s (%2.1f).\n", i, band, title, score)) + } + } + }) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go new file mode 100644 index 00000000..61f19475 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go @@ -0,0 +1,104 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkAdd(b *testing.B) { + var n int + + b.StopTimer() + sel := DocB().Find("dd") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Add("h2[title]").Length() + } else { + sel.Add("h2[title]") + } + } + if n != 43 { + b.Fatalf("want 43, got %d", n) + } +} + +func BenchmarkAddSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocB().Find("dd") + sel2 := DocB().Find("h2[title]") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.AddSelection(sel2).Length() + } else { + sel.AddSelection(sel2) + } + } + if n != 43 { + b.Fatalf("want 43, got %d", n) + } +} + +func BenchmarkAddNodes(b *testing.B) { + var 
n int + + b.StopTimer() + sel := DocB().Find("dd") + sel2 := DocB().Find("h2[title]") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.AddNodes(nodes...).Length() + } else { + sel.AddNodes(nodes...) + } + } + if n != 43 { + b.Fatalf("want 43, got %d", n) + } +} + +func BenchmarkAddNodesBig(b *testing.B) { + var n int + + doc := DocW() + sel := doc.Find("li") + // make nodes > 1000 + nodes := sel.Nodes + nodes = append(nodes, nodes...) + nodes = append(nodes, nodes...) + sel = doc.Find("xyz") + b.ResetTimer() + + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.AddNodes(nodes...).Length() + } else { + sel.AddNodes(nodes...) + } + } + if n != 373 { + b.Fatalf("want 373, got %d", n) + } +} + +func BenchmarkAndSelf(b *testing.B) { + var n int + + b.StopTimer() + sel := DocB().Find("dd").Parent() + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.AndSelf().Length() + } else { + sel.AndSelf() + } + } + if n != 44 { + b.Fatalf("want 44, got %d", n) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go new file mode 100644 index 00000000..61641c8f --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go @@ -0,0 +1,236 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkFilter(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Filter(".toclevel-1").Length() + } else { + sel.Filter(".toclevel-1") + } + } + if n != 13 { + b.Fatalf("want 13, got %d", n) + } +} + +func BenchmarkNot(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Not(".toclevel-2").Length() + } else { + sel.Filter(".toclevel-2") + } + } + if n != 371 { + b.Fatalf("want 371, got %d", n) + } +} + +func BenchmarkFilterFunction(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + f := func(i int, s *Selection) bool { + return len(s.Get(0).Attr) > 0 + } + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.FilterFunction(f).Length() + } else { + sel.FilterFunction(f) + } + } + if n != 112 { + b.Fatalf("want 112, got %d", n) + } +} + +func BenchmarkNotFunction(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + f := func(i int, s *Selection) bool { + return len(s.Get(0).Attr) > 0 + } + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NotFunction(f).Length() + } else { + sel.NotFunction(f) + } + } + if n != 261 { + b.Fatalf("want 261, got %d", n) + } +} + +func BenchmarkFilterNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-2") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.FilterNodes(nodes...).Length() + } else { + sel.FilterNodes(nodes...) + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkNotNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-1") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NotNodes(nodes...).Length() + } else { + sel.NotNodes(nodes...) 
+ } + } + if n != 360 { + b.Fatalf("want 360, got %d", n) + } +} + +func BenchmarkFilterSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-2") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.FilterSelection(sel2).Length() + } else { + sel.FilterSelection(sel2) + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkNotSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-1") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NotSelection(sel2).Length() + } else { + sel.NotSelection(sel2) + } + } + if n != 360 { + b.Fatalf("want 360, got %d", n) + } +} + +func BenchmarkHas(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Has(".editsection").Length() + } else { + sel.Has(".editsection") + } + } + if n != 13 { + b.Fatalf("want 13, got %d", n) + } +} + +func BenchmarkHasNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".tocnumber") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.HasNodes(nodes...).Length() + } else { + sel.HasNodes(nodes...) + } + } + if n != 15 { + b.Fatalf("want 15, got %d", n) + } +} + +func BenchmarkHasSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".tocnumber") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.HasSelection(sel2).Length() + } else { + sel.HasSelection(sel2) + } + } + if n != 15 { + b.Fatalf("want 15, got %d", n) + } +} + +func BenchmarkEnd(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li").Has(".tocnumber") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.End().Length() + } else { + sel.End() + } + } + if n != 373 { + b.Fatalf("wnat 373, got %d", n) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go new file mode 100644 index 00000000..39445b0a --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go @@ -0,0 +1,68 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkEach(b *testing.B) { + var tmp, n int + + b.StopTimer() + sel := DocW().Find("td") + f := func(i int, s *Selection) { + tmp++ + } + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Each(f) + if n == 0 { + n = tmp + } + } + if n != 59 { + b.Fatalf("want 59, got %d", n) + } +} + +func BenchmarkMap(b *testing.B) { + var tmp, n int + + b.StopTimer() + sel := DocW().Find("td") + f := func(i int, s *Selection) string { + tmp++ + return string(tmp) + } + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Map(f) + if n == 0 { + n = tmp + } + } + if n != 59 { + b.Fatalf("want 59, got %d", n) + } +} + +func BenchmarkEachWithBreak(b *testing.B) { + var tmp, n int + + b.StopTimer() + sel := DocW().Find("td") + f := func(i int, s *Selection) bool { + tmp++ + return tmp < 10 + } + b.StartTimer() + for i := 0; i < b.N; i++ { + tmp = 0 + sel.EachWithBreak(f) + if n == 0 { + n = tmp + } + } + if n != 10 { + b.Fatalf("want 10, got %d", n) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go new file mode 100644 index 00000000..8acf5bfc --- /dev/null +++ 
b/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go @@ -0,0 +1,51 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkAttr(b *testing.B) { + var s string + + b.StopTimer() + sel := DocW().Find("h1") + b.StartTimer() + for i := 0; i < b.N; i++ { + s, _ = sel.Attr("id") + } + if s != "firstHeading" { + b.Fatalf("want firstHeading, got %q", s) + } +} + +func BenchmarkText(b *testing.B) { + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Text() + } +} + +func BenchmarkLength(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + n = sel.Length() + } + if n != 14 { + b.Fatalf("want 14, got %d", n) + } +} + +func BenchmarkHtml(b *testing.B) { + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + sel.Html() + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go new file mode 100644 index 00000000..64fdbc4b --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go @@ -0,0 +1,111 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkIs(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.Is(".toclevel-2") + } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkIsPositional(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.Is("li:nth-child(2)") + } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkIsFunction(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find(".toclevel-1") + f := func(i int, s *Selection) bool { + return i == 8 + } + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.IsFunction(f) + } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkIsSelection(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-2") + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.IsSelection(sel2) + } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkIsNodes(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("li") + sel2 := DocW().Find(".toclevel-2") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.IsNodes(nodes...) 
+ } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkHasClass(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("span") + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.HasClass("official") + } + if !y { + b.Fatal("want true") + } +} + +func BenchmarkContains(b *testing.B) { + var y bool + + b.StopTimer() + sel := DocW().Find("span.url") + sel2 := DocW().Find("a[rel=\"nofollow\"]") + node := sel2.Nodes[0] + b.StartTimer() + for i := 0; i < b.N; i++ { + y = sel.Contains(node) + } + if !y { + b.Fatal("want true") + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go new file mode 100644 index 00000000..de84bcdf --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go @@ -0,0 +1,802 @@ +package goquery + +import ( + "testing" +) + +func BenchmarkFind(b *testing.B) { + var n int + + for i := 0; i < b.N; i++ { + if n == 0 { + n = DocB().Find("dd").Length() + + } else { + DocB().Find("dd") + } + } + if n != 41 { + b.Fatalf("want 41, got %d", n) + } +} + +func BenchmarkFindWithinSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("ul") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Find("a[class]").Length() + } else { + sel.Find("a[class]") + } + } + if n != 39 { + b.Fatalf("want 39, got %d", n) + } +} + +func BenchmarkFindSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("ul") + sel2 := DocW().Find("span") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.FindSelection(sel2).Length() + } else { + sel.FindSelection(sel2) + } + } + if n != 73 { + b.Fatalf("want 73, got %d", n) + } +} + +func BenchmarkFindNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("ul") + sel2 := DocW().Find("span") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.FindNodes(nodes...).Length() + } else { + sel.FindNodes(nodes...) 
+ } + } + if n != 73 { + b.Fatalf("want 73, got %d", n) + } +} + +func BenchmarkContents(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-1") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Contents().Length() + } else { + sel.Contents() + } + } + if n != 16 { + b.Fatalf("want 16, got %d", n) + } +} + +func BenchmarkContentsFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-1") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ContentsFiltered("a[href=\"#Examples\"]").Length() + } else { + sel.ContentsFiltered("a[href=\"#Examples\"]") + } + } + if n != 1 { + b.Fatalf("want 1, got %d", n) + } +} + +func BenchmarkChildren(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-2") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Children().Length() + } else { + sel.Children() + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkChildrenFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h3") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ChildrenFiltered(".editsection").Length() + } else { + sel.ChildrenFiltered(".editsection") + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkParent(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Parent().Length() + } else { + sel.Parent() + } + } + if n != 55 { + b.Fatalf("want 55, got %d", n) + } +} + +func BenchmarkParentFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentFiltered("ul[id]").Length() + } else { + sel.ParentFiltered("ul[id]") + } + } + if n != 4 { + b.Fatalf("want 4, got %d", n) + } +} + +func BenchmarkParents(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("th a") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Parents().Length() + } else { + sel.Parents() + } + } + if n != 73 { + b.Fatalf("want 73, got %d", n) + } +} + +func BenchmarkParentsFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("th a") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsFiltered("tr").Length() + } else { + sel.ParentsFiltered("tr") + } + } + if n != 18 { + b.Fatalf("want 18, got %d", n) + } +} + +func BenchmarkParentsUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("th a") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsUntil("table").Length() + } else { + sel.ParentsUntil("table") + } + } + if n != 52 { + b.Fatalf("want 52, got %d", n) + } +} + +func BenchmarkParentsUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("th a") + sel2 := DocW().Find("#content") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsUntilSelection(sel2).Length() + } else { + sel.ParentsUntilSelection(sel2) + } + } + if n != 70 { + b.Fatalf("want 70, got %d", n) + } +} + +func BenchmarkParentsUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("th a") + sel2 := DocW().Find("#content") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsUntilNodes(nodes...).Length() + } else { + sel.ParentsUntilNodes(nodes...) 
+ } + } + if n != 70 { + b.Fatalf("want 70, got %d", n) + } +} + +func BenchmarkParentsFilteredUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-1 a") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsFilteredUntil(":nth-child(1)", "ul").Length() + } else { + sel.ParentsFilteredUntil(":nth-child(1)", "ul") + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkParentsFilteredUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-1 a") + sel2 := DocW().Find("ul") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsFilteredUntilSelection(":nth-child(1)", sel2).Length() + } else { + sel.ParentsFilteredUntilSelection(":nth-child(1)", sel2) + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkParentsFilteredUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find(".toclevel-1 a") + sel2 := DocW().Find("ul") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ParentsFilteredUntilNodes(":nth-child(1)", nodes...).Length() + } else { + sel.ParentsFilteredUntilNodes(":nth-child(1)", nodes...) + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkSiblings(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("ul li:nth-child(1)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Siblings().Length() + } else { + sel.Siblings() + } + } + if n != 293 { + b.Fatalf("want 293, got %d", n) + } +} + +func BenchmarkSiblingsFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("ul li:nth-child(1)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.SiblingsFiltered("[class]").Length() + } else { + sel.SiblingsFiltered("[class]") + } + } + if n != 46 { + b.Fatalf("want 46, got %d", n) + } +} + +func BenchmarkNext(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(1)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Next().Length() + } else { + sel.Next() + } + } + if n != 49 { + b.Fatalf("want 49, got %d", n) + } +} + +func BenchmarkNextFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(1)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextFiltered("[class]").Length() + } else { + sel.NextFiltered("[class]") + } + } + if n != 6 { + b.Fatalf("want 6, got %d", n) + } +} + +func BenchmarkNextAll(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(3)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextAll().Length() + } else { + sel.NextAll() + } + } + if n != 234 { + b.Fatalf("want 234, got %d", n) + } +} + +func BenchmarkNextAllFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(3)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextAllFiltered("[class]").Length() + } else { + sel.NextAllFiltered("[class]") + } + } + if n != 33 { + b.Fatalf("want 33, got %d", n) + } +} + +func BenchmarkPrev(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:last-child") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Prev().Length() + } else { + sel.Prev() + } + } + if n != 49 { + b.Fatalf("want 49, got %d", n) + } +} + +func BenchmarkPrevFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:last-child") + 
b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevFiltered("[class]").Length() + } else { + sel.PrevFiltered("[class]") + } + } + // There is one more Prev li with a class, compared to Next li with a class + // (confirmed by looking at the HTML, this is ok) + if n != 7 { + b.Fatalf("want 7, got %d", n) + } +} + +func BenchmarkPrevAll(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(4)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevAll().Length() + } else { + sel.PrevAll() + } + } + if n != 78 { + b.Fatalf("want 78, got %d", n) + } +} + +func BenchmarkPrevAllFiltered(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:nth-child(4)") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevAllFiltered("[class]").Length() + } else { + sel.PrevAllFiltered("[class]") + } + } + if n != 6 { + b.Fatalf("want 6, got %d", n) + } +} + +func BenchmarkNextUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:first-child") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextUntil(":nth-child(4)").Length() + } else { + sel.NextUntil(":nth-child(4)") + } + } + if n != 84 { + b.Fatalf("want 84, got %d", n) + } +} + +func BenchmarkNextUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("ul") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextUntilSelection(sel2).Length() + } else { + sel.NextUntilSelection(sel2) + } + } + if n != 42 { + b.Fatalf("want 42, got %d", n) + } +} + +func BenchmarkNextUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("p") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextUntilNodes(nodes...).Length() + } else { + sel.NextUntilNodes(nodes...) + } + } + if n != 12 { + b.Fatalf("want 12, got %d", n) + } +} + +func BenchmarkPrevUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("li:last-child") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevUntil(":nth-child(4)").Length() + } else { + sel.PrevUntil(":nth-child(4)") + } + } + if n != 238 { + b.Fatalf("want 238, got %d", n) + } +} + +func BenchmarkPrevUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("ul") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevUntilSelection(sel2).Length() + } else { + sel.PrevUntilSelection(sel2) + } + } + if n != 49 { + b.Fatalf("want 49, got %d", n) + } +} + +func BenchmarkPrevUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("p") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevUntilNodes(nodes...).Length() + } else { + sel.PrevUntilNodes(nodes...) 
+ } + } + if n != 11 { + b.Fatalf("want 11, got %d", n) + } +} + +func BenchmarkNextFilteredUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextFilteredUntil("p", "div").Length() + } else { + sel.NextFilteredUntil("p", "div") + } + } + if n != 22 { + b.Fatalf("want 22, got %d", n) + } +} + +func BenchmarkNextFilteredUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("div") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextFilteredUntilSelection("p", sel2).Length() + } else { + sel.NextFilteredUntilSelection("p", sel2) + } + } + if n != 22 { + b.Fatalf("want 22, got %d", n) + } +} + +func BenchmarkNextFilteredUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("div") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.NextFilteredUntilNodes("p", nodes...).Length() + } else { + sel.NextFilteredUntilNodes("p", nodes...) + } + } + if n != 22 { + b.Fatalf("want 22, got %d", n) + } +} + +func BenchmarkPrevFilteredUntil(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevFilteredUntil("p", "div").Length() + } else { + sel.PrevFilteredUntil("p", "div") + } + } + if n != 20 { + b.Fatalf("want 20, got %d", n) + } +} + +func BenchmarkPrevFilteredUntilSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("div") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevFilteredUntilSelection("p", sel2).Length() + } else { + sel.PrevFilteredUntilSelection("p", sel2) + } + } + if n != 20 { + b.Fatalf("want 20, got %d", n) + } +} + +func BenchmarkPrevFilteredUntilNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := DocW().Find("h2") + sel2 := DocW().Find("div") + nodes := sel2.Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.PrevFilteredUntilNodes("p", nodes...).Length() + } else { + sel.PrevFilteredUntilNodes("p", nodes...) + } + } + if n != 20 { + b.Fatalf("want 20, got %d", n) + } +} + +func BenchmarkClosest(b *testing.B) { + var n int + + b.StopTimer() + sel := Doc().Find(".container-fluid") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.Closest(".pvk-content").Length() + } else { + sel.Closest(".pvk-content") + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkClosestSelection(b *testing.B) { + var n int + + b.StopTimer() + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".pvk-content") + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ClosestSelection(sel2).Length() + } else { + sel.ClosestSelection(sel2) + } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} + +func BenchmarkClosestNodes(b *testing.B) { + var n int + + b.StopTimer() + sel := Doc().Find(".container-fluid") + nodes := Doc().Find(".pvk-content").Nodes + b.StartTimer() + for i := 0; i < b.N; i++ { + if n == 0 { + n = sel.ClosestNodes(nodes...).Length() + } else { + sel.ClosestNodes(nodes...) 
+ } + } + if n != 2 { + b.Fatalf("want 2, got %d", n) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/doc.go b/vendor/github.com/PuerkitoBio/goquery/doc.go new file mode 100644 index 00000000..71146a78 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/doc.go @@ -0,0 +1,123 @@ +// Copyright (c) 2012-2016, Martin Angers & Contributors +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without modification, +// are permitted provided that the following conditions are met: +// +// * Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation and/or +// other materials provided with the distribution. +// * Neither the name of the author nor the names of its contributors may be used to +// endorse or promote products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS +// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY +// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package goquery implements features similar to jQuery, including the chainable +syntax, to manipulate and query an HTML document. + +It brings a syntax and a set of features similar to jQuery to the Go language. +It is based on Go's net/html package and the CSS Selector library cascadia. +Since the net/html parser returns nodes, and not a full-featured DOM +tree, jQuery's stateful manipulation functions (like height(), css(), detach()) +have been left off. + +Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is +the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. +See the repository's wiki for various options on how to do this. + +Syntax-wise, it is as close as possible to jQuery, with the same method names when +possible, and that warm and fuzzy chainable interface. jQuery being the +ultra-popular library that it is, writing a similar HTML-manipulating +library was better to follow its API than to start anew (in the same spirit as +Go's fmt package), even though some of its methods are less than intuitive (looking +at you, index()...). + +It is hosted on GitHub, along with additional documentation in the README.md +file: https://github.com/puerkitobio/goquery + +Please note that because of the net/html dependency, goquery requires Go1.1+. + +The various methods are split into files based on the category of behavior. +The three dots (...) indicate that various "overloads" are available. + +* array.go : array-like positional manipulation of the selection. 
+ - Eq() + - First() + - Get() + - Index...() + - Last() + - Slice() + +* expand.go : methods that expand or augment the selection's set. + - Add...() + - AndSelf() + - Union(), which is an alias for AddSelection() + +* filter.go : filtering methods, that reduce the selection's set. + - End() + - Filter...() + - Has...() + - Intersection(), which is an alias of FilterSelection() + - Not...() + +* iteration.go : methods to loop over the selection's nodes. + - Each() + - EachWithBreak() + - Map() + +* manipulation.go : methods for modifying the document + - After...() + - Append...() + - Before...() + - Clone() + - Empty() + - Prepend...() + - Remove...() + - ReplaceWith...() + - Unwrap() + - Wrap...() + - WrapAll...() + - WrapInner...() + +* property.go : methods that inspect and get the node's properties values. + - Attr*(), RemoveAttr(), SetAttr() + - AddClass(), HasClass(), RemoveClass(), ToggleClass() + - Html() + - Length() + - Size(), which is an alias for Length() + - Text() + +* query.go : methods that query, or reflect, a node's identity. + - Contains() + - Is...() + +* traversal.go : methods to traverse the HTML document tree. + - Children...() + - Contents() + - Find...() + - Next...() + - Parent[s]...() + - Prev...() + - Siblings...() + +* type.go : definition of the types exposed by goquery. + - Document + - Selection + - Matcher + +* utilities.go : definition of helper functions (and not methods on a *Selection) +that are not part of jQuery, but are useful to goquery. + - NodeName + - OuterHtml +*/ +package goquery diff --git a/vendor/github.com/PuerkitoBio/goquery/doc/tips.md b/vendor/github.com/PuerkitoBio/goquery/doc/tips.md new file mode 100644 index 00000000..4e789528 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/doc/tips.md @@ -0,0 +1,68 @@ +# Tips and tricks + +## Handle Non-UTF8 html Pages + +The `go.net/html` package used by `goquery` requires that the html document is UTF-8 encoded. When you know the encoding of the html page is not UTF-8, you can use the `iconv` package to convert it to UTF-8 (there are various implementation of the `iconv` API, see [godoc.org][iconv] for other options): + +``` +$ go get -u github.com/djimenez/iconv-go +``` + +and then: + +``` +// Load the URL +res, err := http.Get(url) +if err != nil { + // handle error +} +defer res.Body.Close() + +// Convert the designated charset HTML to utf-8 encoded HTML. +// `charset` being one of the charsets known by the iconv package. +utfBody, err := iconv.NewReader(res.Body, charset, "utf-8") +if err != nil { + // handler error +} + +// use utfBody using goquery +doc, err := goquery.NewDocumentFromReader(utfBody) +if err != nil { + // handler error +} +// use doc... +``` + +Thanks to github user @YuheiNakasaka. + +Actually, the official go.text repository covers this use case too, see its [godoc page][text] for the details. + + +## Handle Javascript-based Pages + +`goquery` is great to handle normal html pages, but when most of the page is build dynamically using javascript, there's not much it can do. There are various options when faced with this problem: + +* Use a headless browser such as [webloop][]. +* Use a Go javascript parser package, such as [otto][]. + +You can find a code example using `otto` [in this gist][exotto]. Thanks to github user @cryptix. 
+ +## For Loop + +If all you need is a normal `for` loop over all nodes in the current selection, where `Map/Each`-style iteration is not necessary, you can use the following: + +``` +sel := Doc().Find(".selector") +for i := range sel.Nodes { + single := sel.Eq(i) + // use `single` as a selection of 1 node +} +``` + +Thanks to github user @jmoiron. + +[webloop]: https://github.com/sourcegraph/webloop +[otto]: https://github.com/robertkrimen/otto +[exotto]: https://gist.github.com/cryptix/87127f76a94183747b53 +[iconv]: http://godoc.org/?q=iconv +[text]: https://godoc.org/golang.org/x/text/encoding diff --git a/vendor/github.com/PuerkitoBio/goquery/example_test.go b/vendor/github.com/PuerkitoBio/goquery/example_test.go new file mode 100644 index 00000000..17b2354d --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/example_test.go @@ -0,0 +1,30 @@ +package goquery_test + +import ( + "fmt" + "log" + + "github.com/PuerkitoBio/goquery" +) + +// This example scrapes the reviews shown on the home page of metalsucks.net. +func Example() { + // Load the HTML document + doc, err := goquery.NewDocument("http://metalsucks.net") + if err != nil { + log.Fatal(err) + } + + // Find the review items + doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) { + // For each item found, get the band and title + band := s.Find("a").Text() + title := s.Find("i").Text() + fmt.Printf("Review %d: %s - %s\n", i, band, title) + }) + // To see the output of the Example while running the test suite (go test), simply + // remove the leading "x" before Output on the next line. This will cause the + // example to fail (all the "real" tests should pass). + + // xOutput: voluntarily fail the Example output. +} diff --git a/vendor/github.com/PuerkitoBio/goquery/expand.go b/vendor/github.com/PuerkitoBio/goquery/expand.go new file mode 100644 index 00000000..f0c6c86d --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/expand.go @@ -0,0 +1,46 @@ +package goquery + +import "golang.org/x/net/html" + +// Add adds the selector string's matching nodes to those in the current +// selection and returns a new Selection object. +// The selector string is run in the context of the document of the current +// Selection object. +func (s *Selection) Add(selector string) *Selection { + return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...) +} + +// AddMatcher adds the matcher's matching nodes to those in the current +// selection and returns a new Selection object. +// The matcher is run in the context of the document of the current +// Selection object. +func (s *Selection) AddMatcher(m Matcher) *Selection { + return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...) +} + +// AddSelection adds the specified Selection object's nodes to those in the +// current selection and returns a new Selection object. +func (s *Selection) AddSelection(sel *Selection) *Selection { + if sel == nil { + return s.AddNodes() + } + return s.AddNodes(sel.Nodes...) +} + +// Union is an alias for AddSelection. +func (s *Selection) Union(sel *Selection) *Selection { + return s.AddSelection(sel) +} + +// AddNodes adds the specified nodes to those in the +// current selection and returns a new Selection object. +func (s *Selection) AddNodes(nodes ...*html.Node) *Selection { + return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil)) +} + +// AndSelf adds the previous set of elements on the stack to the current set. 
+// It returns a new Selection object containing the current Selection combined +// with the previous one. +func (s *Selection) AndSelf() *Selection { + return s.AddSelection(s.prevSel) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/expand_test.go b/vendor/github.com/PuerkitoBio/goquery/expand_test.go new file mode 100644 index 00000000..4557025d --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/expand_test.go @@ -0,0 +1,96 @@ +package goquery + +import ( + "testing" +) + +func TestAdd(t *testing.T) { + sel := Doc().Find("div.row-fluid").Add("a") + assertLength(t, sel.Nodes, 19) +} + +func TestAddInvalid(t *testing.T) { + sel1 := Doc().Find("div.row-fluid") + sel2 := sel1.Add("") + assertLength(t, sel1.Nodes, 9) + assertLength(t, sel2.Nodes, 9) + if sel1 == sel2 { + t.Errorf("selections should not be the same") + } +} + +func TestAddRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Add("a").End() + assertEqual(t, sel, sel2) +} + +func TestAddSelection(t *testing.T) { + sel := Doc().Find("div.row-fluid") + sel2 := Doc().Find("a") + sel = sel.AddSelection(sel2) + assertLength(t, sel.Nodes, 19) +} + +func TestAddSelectionNil(t *testing.T) { + sel := Doc().Find("div.row-fluid") + assertLength(t, sel.Nodes, 9) + + sel = sel.AddSelection(nil) + assertLength(t, sel.Nodes, 9) +} + +func TestAddSelectionRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Find("a") + sel2 = sel.AddSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestAddNodes(t *testing.T) { + sel := Doc().Find("div.pvk-gutter") + sel2 := Doc().Find(".pvk-content") + sel = sel.AddNodes(sel2.Nodes...) + assertLength(t, sel.Nodes, 9) +} + +func TestAddNodesNone(t *testing.T) { + sel := Doc().Find("div.pvk-gutter").AddNodes() + assertLength(t, sel.Nodes, 6) +} + +func TestAddNodesRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Find("a") + sel2 = sel.AddNodes(sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestAddNodesBig(t *testing.T) { + doc := DocW() + sel := doc.Find("li") + assertLength(t, sel.Nodes, 373) + sel2 := doc.Find("xyz") + assertLength(t, sel2.Nodes, 0) + + nodes := sel.Nodes + sel2 = sel2.AddNodes(nodes...) + assertLength(t, sel2.Nodes, 373) + nodes2 := append(nodes, nodes...) + sel2 = sel2.End().AddNodes(nodes2...) + assertLength(t, sel2.Nodes, 373) + nodes3 := append(nodes2, nodes...) + sel2 = sel2.End().AddNodes(nodes3...) + assertLength(t, sel2.Nodes, 373) +} + +func TestAndSelf(t *testing.T) { + sel := Doc().Find(".span12").Last().AndSelf() + assertLength(t, sel.Nodes, 2) +} + +func TestAndSelfRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Find("a").AndSelf().End().End() + assertEqual(t, sel, sel2) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/filter.go b/vendor/github.com/PuerkitoBio/goquery/filter.go new file mode 100644 index 00000000..9138ffb3 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/filter.go @@ -0,0 +1,163 @@ +package goquery + +import "golang.org/x/net/html" + +// Filter reduces the set of matched elements to those that match the selector string. +// It returns a new Selection object for this subset of matching elements. +func (s *Selection) Filter(selector string) *Selection { + return s.FilterMatcher(compileMatcher(selector)) +} + +// FilterMatcher reduces the set of matched elements to those that match +// the given matcher. It returns a new Selection object for this subset +// of matching elements. 
+func (s *Selection) FilterMatcher(m Matcher) *Selection { + return pushStack(s, winnow(s, m, true)) +} + +// Not removes elements from the Selection that match the selector string. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) Not(selector string) *Selection { + return s.NotMatcher(compileMatcher(selector)) +} + +// NotMatcher removes elements from the Selection that match the given matcher. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotMatcher(m Matcher) *Selection { + return pushStack(s, winnow(s, m, false)) +} + +// FilterFunction reduces the set of matched elements to those that pass the function's test. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection { + return pushStack(s, winnowFunction(s, f, true)) +} + +// NotFunction removes elements from the Selection that pass the function's test. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection { + return pushStack(s, winnowFunction(s, f, false)) +} + +// FilterNodes reduces the set of matched elements to those that match the specified nodes. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection { + return pushStack(s, winnowNodes(s, nodes, true)) +} + +// NotNodes removes elements from the Selection that match the specified nodes. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotNodes(nodes ...*html.Node) *Selection { + return pushStack(s, winnowNodes(s, nodes, false)) +} + +// FilterSelection reduces the set of matched elements to those that match a +// node in the specified Selection object. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, winnowNodes(s, nil, true)) + } + return pushStack(s, winnowNodes(s, sel.Nodes, true)) +} + +// NotSelection removes elements from the Selection that match a node in the specified +// Selection object. It returns a new Selection object with the matching elements removed. +func (s *Selection) NotSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, winnowNodes(s, nil, false)) + } + return pushStack(s, winnowNodes(s, sel.Nodes, false)) +} + +// Intersection is an alias for FilterSelection. +func (s *Selection) Intersection(sel *Selection) *Selection { + return s.FilterSelection(sel) +} + +// Has reduces the set of matched elements to those that have a descendant +// that matches the selector. +// It returns a new Selection object with the matching elements. +func (s *Selection) Has(selector string) *Selection { + return s.HasSelection(s.document.Find(selector)) +} + +// HasMatcher reduces the set of matched elements to those that have a descendant +// that matches the matcher. +// It returns a new Selection object with the matching elements. +func (s *Selection) HasMatcher(m Matcher) *Selection { + return s.HasSelection(s.document.FindMatcher(m)) +} + +// HasNodes reduces the set of matched elements to those that have a +// descendant that matches one of the nodes. +// It returns a new Selection object with the matching elements. 
+func (s *Selection) HasNodes(nodes ...*html.Node) *Selection { + return s.FilterFunction(func(_ int, sel *Selection) bool { + // Add all nodes that contain one of the specified nodes + for _, n := range nodes { + if sel.Contains(n) { + return true + } + } + return false + }) +} + +// HasSelection reduces the set of matched elements to those that have a +// descendant that matches one of the nodes of the specified Selection object. +// It returns a new Selection object with the matching elements. +func (s *Selection) HasSelection(sel *Selection) *Selection { + if sel == nil { + return s.HasNodes() + } + return s.HasNodes(sel.Nodes...) +} + +// End ends the most recent filtering operation in the current chain and +// returns the set of matched elements to its previous state. +func (s *Selection) End() *Selection { + if s.prevSel != nil { + return s.prevSel + } + return newEmptySelection(s.document) +} + +// Filter based on the matcher, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. +func winnow(sel *Selection, m Matcher, keep bool) []*html.Node { + // Optimize if keep is requested + if keep { + return m.Filter(sel.Nodes) + } + // Use grep + return grep(sel, func(i int, s *Selection) bool { + return !m.Match(s.Get(0)) + }) +} + +// Filter based on an array of nodes, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. +func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node { + if len(nodes)+len(sel.Nodes) < minNodesForSet { + return grep(sel, func(i int, s *Selection) bool { + return isInSlice(nodes, s.Get(0)) == keep + }) + } + + set := make(map[*html.Node]bool) + for _, n := range nodes { + set[n] = true + } + return grep(sel, func(i int, s *Selection) bool { + return set[s.Get(0)] == keep + }) +} + +// Filter based on a function test, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. 
+func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node { + return grep(sel, func(i int, s *Selection) bool { + return f(i, s) == keep + }) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/filter_test.go b/vendor/github.com/PuerkitoBio/goquery/filter_test.go new file mode 100644 index 00000000..f663c08f --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/filter_test.go @@ -0,0 +1,206 @@ +package goquery + +import ( + "testing" +) + +func TestFilter(t *testing.T) { + sel := Doc().Find(".span12").Filter(".alert") + assertLength(t, sel.Nodes, 1) +} + +func TestFilterNone(t *testing.T) { + sel := Doc().Find(".span12").Filter(".zzalert") + assertLength(t, sel.Nodes, 0) +} + +func TestFilterInvalid(t *testing.T) { + sel := Doc().Find(".span12").Filter("") + assertLength(t, sel.Nodes, 0) +} + +func TestFilterRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Filter(".alert").End() + assertEqual(t, sel, sel2) +} + +func TestFilterFunction(t *testing.T) { + sel := Doc().Find(".pvk-content").FilterFunction(func(i int, s *Selection) bool { + return i > 0 + }) + assertLength(t, sel.Nodes, 2) +} + +func TestFilterFunctionRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.FilterFunction(func(i int, s *Selection) bool { + return i > 0 + }).End() + assertEqual(t, sel, sel2) +} + +func TestFilterNode(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.FilterNodes(sel.Nodes[2]) + assertLength(t, sel2.Nodes, 1) +} + +func TestFilterNodeRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.FilterNodes(sel.Nodes[2]).End() + assertEqual(t, sel, sel2) +} + +func TestFilterSelection(t *testing.T) { + sel := Doc().Find(".link") + sel2 := Doc().Find("a[ng-click]") + sel3 := sel.FilterSelection(sel2) + assertLength(t, sel3.Nodes, 1) +} + +func TestFilterSelectionRollback(t *testing.T) { + sel := Doc().Find(".link") + sel2 := Doc().Find("a[ng-click]") + sel2 = sel.FilterSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestFilterSelectionNil(t *testing.T) { + var sel2 *Selection + + sel := Doc().Find(".link") + sel3 := sel.FilterSelection(sel2) + assertLength(t, sel3.Nodes, 0) +} + +func TestNot(t *testing.T) { + sel := Doc().Find(".span12").Not(".alert") + assertLength(t, sel.Nodes, 1) +} + +func TestNotInvalid(t *testing.T) { + sel := Doc().Find(".span12").Not("") + assertLength(t, sel.Nodes, 2) +} + +func TestNotRollback(t *testing.T) { + sel := Doc().Find(".span12") + sel2 := sel.Not(".alert").End() + assertEqual(t, sel, sel2) +} + +func TestNotNone(t *testing.T) { + sel := Doc().Find(".span12").Not(".zzalert") + assertLength(t, sel.Nodes, 2) +} + +func TestNotFunction(t *testing.T) { + sel := Doc().Find(".pvk-content").NotFunction(func(i int, s *Selection) bool { + return i > 0 + }) + assertLength(t, sel.Nodes, 1) +} + +func TestNotFunctionRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.NotFunction(func(i int, s *Selection) bool { + return i > 0 + }).End() + assertEqual(t, sel, sel2) +} + +func TestNotNode(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.NotNodes(sel.Nodes[2]) + assertLength(t, sel2.Nodes, 2) +} + +func TestNotNodeRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.NotNodes(sel.Nodes[2]).End() + assertEqual(t, sel, sel2) +} + +func TestNotSelection(t *testing.T) { + sel := Doc().Find(".link") + sel2 := Doc().Find("a[ng-click]") + sel3 := sel.NotSelection(sel2) + assertLength(t, sel3.Nodes, 6) 
+} + +func TestNotSelectionRollback(t *testing.T) { + sel := Doc().Find(".link") + sel2 := Doc().Find("a[ng-click]") + sel2 = sel.NotSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestIntersection(t *testing.T) { + sel := Doc().Find(".pvk-gutter") + sel2 := Doc().Find("div").Intersection(sel) + assertLength(t, sel2.Nodes, 6) +} + +func TestIntersectionRollback(t *testing.T) { + sel := Doc().Find(".pvk-gutter") + sel2 := Doc().Find("div") + sel2 = sel.Intersection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestHas(t *testing.T) { + sel := Doc().Find(".container-fluid").Has(".center-content") + assertLength(t, sel.Nodes, 2) + // Has() returns the high-level .container-fluid div, and the one that is the immediate parent of center-content +} + +func TestHasInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").Has("") + assertLength(t, sel.Nodes, 0) +} + +func TestHasRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.Has(".center-content").End() + assertEqual(t, sel, sel2) +} + +func TestHasNodes(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".center-content") + sel = sel.HasNodes(sel2.Nodes...) + assertLength(t, sel.Nodes, 2) + // Has() returns the high-level .container-fluid div, and the one that is the immediate parent of center-content +} + +func TestHasNodesRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".center-content") + sel2 = sel.HasNodes(sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestHasSelection(t *testing.T) { + sel := Doc().Find("p") + sel2 := Doc().Find("small") + sel = sel.HasSelection(sel2) + assertLength(t, sel.Nodes, 1) +} + +func TestHasSelectionRollback(t *testing.T) { + sel := Doc().Find("p") + sel2 := Doc().Find("small") + sel2 = sel.HasSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestEnd(t *testing.T) { + sel := Doc().Find("p").Has("small").End() + assertLength(t, sel.Nodes, 4) +} + +func TestEndToTop(t *testing.T) { + sel := Doc().Find("p").Has("small").End().End().End() + assertLength(t, sel.Nodes, 0) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/iteration.go b/vendor/github.com/PuerkitoBio/goquery/iteration.go new file mode 100644 index 00000000..e246f2e0 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/iteration.go @@ -0,0 +1,39 @@ +package goquery + +// Each iterates over a Selection object, executing a function for each +// matched element. It returns the current Selection object. The function +// f is called for each element in the selection with the index of the +// element in that selection starting at 0, and a *Selection that contains +// only that element. +func (s *Selection) Each(f func(int, *Selection)) *Selection { + for i, n := range s.Nodes { + f(i, newSingleSelection(n, s.document)) + } + return s +} + +// EachWithBreak iterates over a Selection object, executing a function for each +// matched element. It is identical to Each except that it is possible to break +// out of the loop by returning false in the callback function. It returns the +// current Selection object. +func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection { + for i, n := range s.Nodes { + if !f(i, newSingleSelection(n, s.document)) { + return s + } + } + return s +} + +// Map passes each element in the current matched set through a function, +// producing a slice of string holding the returned values. 
The function +// f is called for each element in the selection with the index of the +// element in that selection starting at 0, and a *Selection that contains +// only that element. +func (s *Selection) Map(f func(int, *Selection) string) (result []string) { + for i, n := range s.Nodes { + result = append(result, f(i, newSingleSelection(n, s.document))) + } + + return result +} diff --git a/vendor/github.com/PuerkitoBio/goquery/iteration_test.go b/vendor/github.com/PuerkitoBio/goquery/iteration_test.go new file mode 100644 index 00000000..9b6aafb7 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/iteration_test.go @@ -0,0 +1,88 @@ +package goquery + +import ( + "testing" + + "golang.org/x/net/html" +) + +func TestEach(t *testing.T) { + var cnt int + + sel := Doc().Find(".hero-unit .row-fluid").Each(func(i int, n *Selection) { + cnt++ + t.Logf("At index %v, node %v", i, n.Nodes[0].Data) + }).Find("a") + + if cnt != 4 { + t.Errorf("Expected Each() to call function 4 times, got %v times.", cnt) + } + assertLength(t, sel.Nodes, 6) +} + +func TestEachWithBreak(t *testing.T) { + var cnt int + + sel := Doc().Find(".hero-unit .row-fluid").EachWithBreak(func(i int, n *Selection) bool { + cnt++ + t.Logf("At index %v, node %v", i, n.Nodes[0].Data) + return false + }).Find("a") + + if cnt != 1 { + t.Errorf("Expected Each() to call function 1 time, got %v times.", cnt) + } + assertLength(t, sel.Nodes, 6) +} + +func TestEachEmptySelection(t *testing.T) { + var cnt int + + sel := Doc().Find("zzzz") + sel.Each(func(i int, n *Selection) { + cnt++ + }) + if cnt > 0 { + t.Error("Expected Each() to not be called on empty Selection.") + } + sel2 := sel.Find("div") + assertLength(t, sel2.Nodes, 0) +} + +func TestMap(t *testing.T) { + sel := Doc().Find(".pvk-content") + vals := sel.Map(func(i int, s *Selection) string { + n := s.Get(0) + if n.Type == html.ElementNode { + return n.Data + } + return "" + }) + for _, v := range vals { + if v != "div" { + t.Error("Expected Map array result to be all 'div's.") + } + } + if len(vals) != 3 { + t.Errorf("Expected Map array result to have a length of 3, found %v.", len(vals)) + } +} + +func TestForRange(t *testing.T) { + sel := Doc().Find(".pvk-content") + initLen := sel.Length() + for i := range sel.Nodes { + single := sel.Eq(i) + //h, err := single.Html() + //if err != nil { + // t.Fatal(err) + //} + //fmt.Println(i, h) + if single.Length() != 1 { + t.Errorf("%d: expected length of 1, got %d", i, single.Length()) + } + } + if sel.Length() != initLen { + t.Errorf("expected initial selection to still have length %d, got %d", initLen, sel.Length()) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/manipulation.go b/vendor/github.com/PuerkitoBio/goquery/manipulation.go new file mode 100644 index 00000000..ebb4ffe1 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/manipulation.go @@ -0,0 +1,573 @@ +package goquery + +import ( + "strings" + + "golang.org/x/net/html" +) + +// After applies the selector from the root document and inserts the matched elements +// after the elements in the set of matched elements. +// +// If one of the matched elements in the selection is not currently in the +// document, it's impossible to insert nodes after it, so it will be ignored. +// +// This follows the same rules as Selection.Append. 
+func (s *Selection) After(selector string) *Selection { + return s.AfterMatcher(compileMatcher(selector)) +} + +// AfterMatcher applies the matcher from the root document and inserts the matched elements +// after the elements in the set of matched elements. +// +// If one of the matched elements in the selection is not currently in the +// document, it's impossible to insert nodes after it, so it will be ignored. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterMatcher(m Matcher) *Selection { + return s.AfterNodes(m.MatchAll(s.document.rootNode)...) +} + +// AfterSelection inserts the elements in the selection after each element in the set of matched +// elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterSelection(sel *Selection) *Selection { + return s.AfterNodes(sel.Nodes...) +} + +// AfterHtml parses the html and inserts it after the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterHtml(html string) *Selection { + return s.AfterNodes(parseHtml(html)...) +} + +// AfterNodes inserts the nodes after each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) { + if sn.Parent != nil { + sn.Parent.InsertBefore(n, sn.NextSibling) + } + }) +} + +// Append appends the elements specified by the selector to the end of each element +// in the set of matched elements, following those rules: +// +// 1) The selector is applied to the root document. +// +// 2) Elements that are part of the document will be moved to the new location. +// +// 3) If there are multiple locations to append to, cloned nodes will be +// appended to all target locations except the last one, which will be moved +// as noted in (2). +func (s *Selection) Append(selector string) *Selection { + return s.AppendMatcher(compileMatcher(selector)) +} + +// AppendMatcher appends the elements specified by the matcher to the end of each element +// in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendMatcher(m Matcher) *Selection { + return s.AppendNodes(m.MatchAll(s.document.rootNode)...) +} + +// AppendSelection appends the elements in the selection to the end of each element +// in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendSelection(sel *Selection) *Selection { + return s.AppendNodes(sel.Nodes...) +} + +// AppendHtml parses the html and appends it to the set of matched elements. +func (s *Selection) AppendHtml(html string) *Selection { + return s.AppendNodes(parseHtml(html)...) +} + +// AppendNodes appends the specified nodes to each node in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) { + sn.AppendChild(n) + }) +} + +// Before inserts the matched elements before each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) Before(selector string) *Selection { + return s.BeforeMatcher(compileMatcher(selector)) +} + +// BeforeMatcher inserts the matched elements before each element in the set of matched elements. 
+// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeMatcher(m Matcher) *Selection { + return s.BeforeNodes(m.MatchAll(s.document.rootNode)...) +} + +// BeforeSelection inserts the elements in the selection before each element in the set of matched +// elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeSelection(sel *Selection) *Selection { + return s.BeforeNodes(sel.Nodes...) +} + +// BeforeHtml parses the html and inserts it before the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeHtml(html string) *Selection { + return s.BeforeNodes(parseHtml(html)...) +} + +// BeforeNodes inserts the nodes before each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) { + if sn.Parent != nil { + sn.Parent.InsertBefore(n, sn) + } + }) +} + +// Clone creates a deep copy of the set of matched nodes. The new nodes will not be +// attached to the document. +func (s *Selection) Clone() *Selection { + ns := newEmptySelection(s.document) + ns.Nodes = cloneNodes(s.Nodes) + return ns +} + +// Empty removes all children nodes from the set of matched elements. +// It returns the children nodes in a new Selection. +func (s *Selection) Empty() *Selection { + var nodes []*html.Node + + for _, n := range s.Nodes { + for c := n.FirstChild; c != nil; c = n.FirstChild { + n.RemoveChild(c) + nodes = append(nodes, c) + } + } + + return pushStack(s, nodes) +} + +// Prepend prepends the elements specified by the selector to each element in +// the set of matched elements, following the same rules as Append. +func (s *Selection) Prepend(selector string) *Selection { + return s.PrependMatcher(compileMatcher(selector)) +} + +// PrependMatcher prepends the elements specified by the matcher to each +// element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependMatcher(m Matcher) *Selection { + return s.PrependNodes(m.MatchAll(s.document.rootNode)...) +} + +// PrependSelection prepends the elements in the selection to each element in +// the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependSelection(sel *Selection) *Selection { + return s.PrependNodes(sel.Nodes...) +} + +// PrependHtml parses the html and prepends it to the set of matched elements. +func (s *Selection) PrependHtml(html string) *Selection { + return s.PrependNodes(parseHtml(html)...) +} + +// PrependNodes prepends the specified nodes to each node in the set of +// matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) { + // sn.FirstChild may be nil, in which case this functions like + // sn.AppendChild() + sn.InsertBefore(n, sn.FirstChild) + }) +} + +// Remove removes the set of matched elements from the document. +// It returns the same selection, now consisting of nodes not in the document. +func (s *Selection) Remove() *Selection { + for _, n := range s.Nodes { + if n.Parent != nil { + n.Parent.RemoveChild(n) + } + } + + return s +} + +// RemoveFiltered removes the set of matched elements by selector. +// It returns the Selection of removed nodes. 
+func (s *Selection) RemoveFiltered(selector string) *Selection { + return s.RemoveMatcher(compileMatcher(selector)) +} + +// RemoveMatcher removes the set of matched elements. +// It returns the Selection of removed nodes. +func (s *Selection) RemoveMatcher(m Matcher) *Selection { + return s.FilterMatcher(m).Remove() +} + +// ReplaceWith replaces each element in the set of matched elements with the +// nodes matched by the given selector. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWith(selector string) *Selection { + return s.ReplaceWithMatcher(compileMatcher(selector)) +} + +// ReplaceWithMatcher replaces each element in the set of matched elements with +// the nodes matched by the given Matcher. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection { + return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...) +} + +// ReplaceWithSelection replaces each element in the set of matched elements with +// the nodes from the given Selection. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection { + return s.ReplaceWithNodes(sel.Nodes...) +} + +// ReplaceWithHtml replaces each element in the set of matched elements with +// the parsed HTML. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithHtml(html string) *Selection { + return s.ReplaceWithNodes(parseHtml(html)...) +} + +// ReplaceWithNodes replaces each element in the set of matched elements with +// the given nodes. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection { + s.AfterNodes(ns...) + return s.Remove() +} + +// Set the html content of each element in the selection to specified html string. +func (s *Selection) SetHtml(html string) *Selection { + return setHtmlNodes(s, parseHtml(html)...) +} + +// Set the content of each element in the selection to specified content. The +// provided text string is escaped. +func (s *Selection) SetText(text string) *Selection { + return s.SetHtml(html.EscapeString(text)) +} + +// Unwrap removes the parents of the set of matched elements, leaving the matched +// elements (and their siblings, if any) in their place. +// It returns the original selection. +func (s *Selection) Unwrap() *Selection { + s.Parent().Each(func(i int, ss *Selection) { + // For some reason, jquery allows unwrap to remove the element, so + // allowing it here too. Same for . Why it allows those elements to + // be unwrapped while not allowing body is a mystery to me. + if ss.Nodes[0].Data != "body" { + ss.ReplaceWithSelection(ss.Contents()) + } + }) + + return s +} + +// Wrap wraps each element in the set of matched elements inside the first +// element matched by the given selector. The matched child is cloned before +// being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) Wrap(selector string) *Selection { + return s.WrapMatcher(compileMatcher(selector)) +} + +// WrapMatcher wraps each element in the set of matched elements inside the +// first element matched by the given matcher. The matched child is cloned +// before being inserted into the document. +// +// It returns the original set of elements. 
+func (s *Selection) WrapMatcher(m Matcher) *Selection { + return s.wrapNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapSelection wraps each element in the set of matched elements inside the +// first element in the given Selection. The element is cloned before being +// inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapSelection(sel *Selection) *Selection { + return s.wrapNodes(sel.Nodes...) +} + +// WrapHtml wraps each element in the set of matched elements inside the inner- +// most child of the given HTML. +// +// It returns the original set of elements. +func (s *Selection) WrapHtml(html string) *Selection { + return s.wrapNodes(parseHtml(html)...) +} + +// WrapNode wraps each element in the set of matched elements inside the inner- +// most child of the given node. The given node is copied before being inserted +// into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapNode(n *html.Node) *Selection { + return s.wrapNodes(n) +} + +func (s *Selection) wrapNodes(ns ...*html.Node) *Selection { + s.Each(func(i int, ss *Selection) { + ss.wrapAllNodes(ns...) + }) + + return s +} + +// WrapAll wraps a single HTML structure, matched by the given selector, around +// all elements in the set of matched elements. The matched child is cloned +// before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAll(selector string) *Selection { + return s.WrapAllMatcher(compileMatcher(selector)) +} + +// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher, +// around all elements in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllMatcher(m Matcher) *Selection { + return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapAllSelection wraps a single HTML structure, the first node of the given +// Selection, around all elements in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllSelection(sel *Selection) *Selection { + return s.wrapAllNodes(sel.Nodes...) +} + +// WrapAllHtml wraps the given HTML structure around all elements in the set of +// matched elements. The matched child is cloned before being inserted into the +// document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllHtml(html string) *Selection { + return s.wrapAllNodes(parseHtml(html)...) +} + +func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection { + if len(ns) > 0 { + return s.WrapAllNode(ns[0]) + } + return s +} + +// WrapAllNode wraps the given node around the first element in the Selection, +// making all other nodes in the Selection children of the given node. The node +// is cloned before being inserted into the document. +// +// It returns the original set of elements. 
+func (s *Selection) WrapAllNode(n *html.Node) *Selection { + if s.Size() == 0 { + return s + } + + wrap := cloneNode(n) + + first := s.Nodes[0] + if first.Parent != nil { + first.Parent.InsertBefore(wrap, first) + first.Parent.RemoveChild(first) + } + + for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) { + wrap = c + } + + newSingleSelection(wrap, s.document).AppendSelection(s) + + return s +} + +// WrapInner wraps an HTML structure, matched by the given selector, around the +// content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInner(selector string) *Selection { + return s.WrapInnerMatcher(compileMatcher(selector)) +} + +// WrapInnerMatcher wraps an HTML structure, matched by the given selector, +// around the content of element in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerMatcher(m Matcher) *Selection { + return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapInnerSelection wraps an HTML structure, matched by the given selector, +// around the content of element in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerSelection(sel *Selection) *Selection { + return s.wrapInnerNodes(sel.Nodes...) +} + +// WrapInnerHtml wraps an HTML structure, matched by the given selector, around +// the content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerHtml(html string) *Selection { + return s.wrapInnerNodes(parseHtml(html)...) +} + +// WrapInnerNode wraps an HTML structure, matched by the given selector, around +// the content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerNode(n *html.Node) *Selection { + return s.wrapInnerNodes(n) +} + +func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection { + if len(ns) == 0 { + return s + } + + s.Each(func(i int, s *Selection) { + contents := s.Contents() + + if contents.Size() > 0 { + contents.wrapAllNodes(ns...) + } else { + s.AppendNodes(cloneNode(ns[0])) + } + }) + + return s +} + +func parseHtml(h string) []*html.Node { + // Errors are only returned when the io.Reader returns any error besides + // EOF, but strings.Reader never will + nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode}) + if err != nil { + panic("goquery: failed to parse HTML: " + err.Error()) + } + return nodes +} + +func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection { + for _, n := range s.Nodes { + for c := n.FirstChild; c != nil; c = n.FirstChild { + n.RemoveChild(c) + } + for _, c := range ns { + n.AppendChild(cloneNode(c)) + } + } + return s +} + +// Get the first child that is an ElementNode +func getFirstChildEl(n *html.Node) *html.Node { + c := n.FirstChild + for c != nil && c.Type != html.ElementNode { + c = c.NextSibling + } + return c +} + +// Deep copy a slice of nodes. 
+func cloneNodes(ns []*html.Node) []*html.Node { + cns := make([]*html.Node, 0, len(ns)) + + for _, n := range ns { + cns = append(cns, cloneNode(n)) + } + + return cns +} + +// Deep copy a node. The new node has clones of all the original node's +// children but none of its parents or siblings. +func cloneNode(n *html.Node) *html.Node { + nn := &html.Node{ + Type: n.Type, + DataAtom: n.DataAtom, + Data: n.Data, + Attr: make([]html.Attribute, len(n.Attr)), + } + + copy(nn.Attr, n.Attr) + for c := n.FirstChild; c != nil; c = c.NextSibling { + nn.AppendChild(cloneNode(c)) + } + + return nn +} + +func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool, + f func(sn *html.Node, n *html.Node)) *Selection { + + lasti := s.Size() - 1 + + // net.Html doesn't provide document fragments for insertion, so to get + // things in the correct order with After() and Prepend(), the callback + // needs to be called on the reverse of the nodes. + if reverse { + for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 { + ns[i], ns[j] = ns[j], ns[i] + } + } + + for i, sn := range s.Nodes { + for _, n := range ns { + if i != lasti { + f(sn, cloneNode(n)) + } else { + if n.Parent != nil { + n.Parent.RemoveChild(n) + } + f(sn, n) + } + } + } + + return s +} diff --git a/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go b/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go new file mode 100644 index 00000000..c5f50225 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go @@ -0,0 +1,513 @@ +package goquery + +import ( + "testing" +) + +const ( + wrapHtml = "
<div id=\"ins\">test string<div><p><em><b></b></em></p></div></div>
" +) + +func TestAfter(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").After("#nf6") + + assertLength(t, doc.Find("#main #nf6").Nodes, 0) + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("#main + #nf6").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAfterMany(t *testing.T) { + doc := Doc2Clone() + doc.Find(".one").After("#nf6") + + assertLength(t, doc.Find("#foot #nf6").Nodes, 1) + assertLength(t, doc.Find("#main #nf6").Nodes, 1) + assertLength(t, doc.Find(".one + #nf6").Nodes, 2) + printSel(t, doc.Selection) +} + +func TestAfterWithRemoved(t *testing.T) { + doc := Doc2Clone() + s := doc.Find("#main").Remove() + s.After("#nf6") + + assertLength(t, s.Find("#nf6").Nodes, 0) + assertLength(t, doc.Find("#nf6").Nodes, 0) + printSel(t, doc.Selection) +} + +func TestAfterSelection(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").AfterSelection(doc.Find("#nf1, #nf2")) + + assertLength(t, doc.Find("#main #nf1, #main #nf2").Nodes, 0) + assertLength(t, doc.Find("#foot #nf1, #foot #nf2").Nodes, 0) + assertLength(t, doc.Find("#main + #nf1, #nf1 + #nf2").Nodes, 2) + printSel(t, doc.Selection) +} + +func TestAfterHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").AfterHtml("new node") + + assertLength(t, doc.Find("#main + strong").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAppend(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").Append("#nf6") + + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("#main #nf6").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAppendBody(t *testing.T) { + doc := Doc2Clone() + doc.Find("body").Append("#nf6") + + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("#main #nf6").Nodes, 0) + assertLength(t, doc.Find("body > #nf6").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAppendSelection(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").AppendSelection(doc.Find("#nf1, #nf2")) + + assertLength(t, doc.Find("#foot #nf1").Nodes, 0) + assertLength(t, doc.Find("#foot #nf2").Nodes, 0) + assertLength(t, doc.Find("#main #nf1").Nodes, 1) + assertLength(t, doc.Find("#main #nf2").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAppendSelectionExisting(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").AppendSelection(doc.Find("#n1, #n2")) + + assertClass(t, doc.Find("#main :nth-child(1)"), "three") + assertClass(t, doc.Find("#main :nth-child(5)"), "one") + assertClass(t, doc.Find("#main :nth-child(6)"), "two") + printSel(t, doc.Selection) +} + +func TestAppendClone(t *testing.T) { + doc := Doc2Clone() + doc.Find("#n1").AppendSelection(doc.Find("#nf1").Clone()) + + assertLength(t, doc.Find("#foot #nf1").Nodes, 1) + assertLength(t, doc.Find("#main #nf1").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestAppendHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("div").AppendHtml("new node") + + assertLength(t, doc.Find("strong").Nodes, 14) + printSel(t, doc.Selection) +} + +func TestBefore(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").Before("#nf6") + + assertLength(t, doc.Find("#main #nf6").Nodes, 0) + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("body > #nf6:first-child").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestBeforeWithRemoved(t *testing.T) { + doc := Doc2Clone() + s := doc.Find("#main").Remove() + s.Before("#nf6") + + assertLength(t, s.Find("#nf6").Nodes, 0) + assertLength(t, doc.Find("#nf6").Nodes, 0) + printSel(t, doc.Selection) +} + +func TestBeforeSelection(t 
*testing.T) { + doc := Doc2Clone() + doc.Find("#main").BeforeSelection(doc.Find("#nf1, #nf2")) + + assertLength(t, doc.Find("#main #nf1, #main #nf2").Nodes, 0) + assertLength(t, doc.Find("#foot #nf1, #foot #nf2").Nodes, 0) + assertLength(t, doc.Find("body > #nf1:first-child, #nf1 + #nf2").Nodes, 2) + printSel(t, doc.Selection) +} + +func TestBeforeHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").BeforeHtml("new node") + + assertLength(t, doc.Find("body > strong:first-child").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestEmpty(t *testing.T) { + doc := Doc2Clone() + s := doc.Find("#main").Empty() + + assertLength(t, doc.Find("#main").Children().Nodes, 0) + assertLength(t, s.Filter("div").Nodes, 6) + printSel(t, doc.Selection) +} + +func TestPrepend(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").Prepend("#nf6") + + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("#main #nf6:first-child").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestPrependBody(t *testing.T) { + doc := Doc2Clone() + doc.Find("body").Prepend("#nf6") + + assertLength(t, doc.Find("#foot #nf6").Nodes, 0) + assertLength(t, doc.Find("#main #nf6").Nodes, 0) + assertLength(t, doc.Find("body > #nf6:first-child").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestPrependSelection(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").PrependSelection(doc.Find("#nf1, #nf2")) + + assertLength(t, doc.Find("#foot #nf1").Nodes, 0) + assertLength(t, doc.Find("#foot #nf2").Nodes, 0) + assertLength(t, doc.Find("#main #nf1:first-child").Nodes, 1) + assertLength(t, doc.Find("#main #nf2:nth-child(2)").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestPrependSelectionExisting(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main").PrependSelection(doc.Find("#n5, #n6")) + + assertClass(t, doc.Find("#main :nth-child(1)"), "five") + assertClass(t, doc.Find("#main :nth-child(2)"), "six") + assertClass(t, doc.Find("#main :nth-child(5)"), "three") + assertClass(t, doc.Find("#main :nth-child(6)"), "four") + printSel(t, doc.Selection) +} + +func TestPrependClone(t *testing.T) { + doc := Doc2Clone() + doc.Find("#n1").PrependSelection(doc.Find("#nf1").Clone()) + + assertLength(t, doc.Find("#foot #nf1:first-child").Nodes, 1) + assertLength(t, doc.Find("#main #nf1:first-child").Nodes, 1) + printSel(t, doc.Selection) +} + +func TestPrependHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("div").PrependHtml("new node") + + assertLength(t, doc.Find("strong:first-child").Nodes, 14) + printSel(t, doc.Selection) +} + +func TestRemove(t *testing.T) { + doc := Doc2Clone() + doc.Find("#nf1").Remove() + + assertLength(t, doc.Find("#foot #nf1").Nodes, 0) + printSel(t, doc.Selection) +} + +func TestRemoveAll(t *testing.T) { + doc := Doc2Clone() + doc.Find("*").Remove() + + assertLength(t, doc.Find("*").Nodes, 0) + printSel(t, doc.Selection) +} + +func TestRemoveRoot(t *testing.T) { + doc := Doc2Clone() + doc.Find("html").Remove() + + assertLength(t, doc.Find("html").Nodes, 0) + printSel(t, doc.Selection) +} + +func TestRemoveFiltered(t *testing.T) { + doc := Doc2Clone() + nf6 := doc.Find("#nf6") + s := doc.Find("div").RemoveFiltered("#nf6") + + assertLength(t, doc.Find("#nf6").Nodes, 0) + assertLength(t, s.Nodes, 1) + if nf6.Nodes[0] != s.Nodes[0] { + t.Error("Removed node does not match original") + } + printSel(t, doc.Selection) +} + +func TestReplaceWith(t *testing.T) { + doc := Doc2Clone() + + doc.Find("#nf6").ReplaceWith("#main") + assertLength(t, doc.Find("#foot 
#main:last-child").Nodes, 1) + printSel(t, doc.Selection) + + doc.Find("#foot").ReplaceWith("#main") + assertLength(t, doc.Find("#foot").Nodes, 0) + assertLength(t, doc.Find("#main").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestReplaceWithHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("#main, #foot").ReplaceWithHtml("
") + + assertLength(t, doc.Find("#replace").Nodes, 2) + + printSel(t, doc.Selection) +} + +func TestSetHtml(t *testing.T) { + doc := Doc2Clone() + q := doc.Find("#main, #foot") + q.SetHtml(`
<div id="replace">test</div>
`) + + assertLength(t, doc.Find("#replace").Nodes, 2) + assertLength(t, doc.Find("#main, #foot").Nodes, 2) + + if q.Text() != "testtest" { + t.Errorf("Expected text to be %v, found %v", "testtest", q.Text()) + } + + printSel(t, doc.Selection) +} + +func TestSetHtmlNoMatch(t *testing.T) { + doc := Doc2Clone() + q := doc.Find("#notthere") + q.SetHtml(`
<div id="replace">test</div>
`) + + assertLength(t, doc.Find("#replace").Nodes, 0) + + printSel(t, doc.Selection) +} + +func TestSetHtmlEmpty(t *testing.T) { + doc := Doc2Clone() + q := doc.Find("#main") + q.SetHtml(``) + + assertLength(t, doc.Find("#main").Nodes, 1) + assertLength(t, doc.Find("#main").Children().Nodes, 0) + printSel(t, doc.Selection) +} + +func TestSetText(t *testing.T) { + doc := Doc2Clone() + q := doc.Find("#main, #foot") + repl := "
<div id=\"replace\">test</div>
" + q.SetText(repl) + + assertLength(t, doc.Find("#replace").Nodes, 0) + assertLength(t, doc.Find("#main, #foot").Nodes, 2) + + if q.Text() != (repl + repl) { + t.Errorf("Expected text to be %v, found %v", (repl + repl), q.Text()) + } + + h, err := q.Html() + if err != nil { + t.Errorf("Error: %v", err) + } + esc := "<div id="replace">test</div>" + if h != esc { + t.Errorf("Expected html to be %v, found %v", esc, h) + } + + printSel(t, doc.Selection) +} + +func TestReplaceWithSelection(t *testing.T) { + doc := Doc2Clone() + sel := doc.Find("#nf6").ReplaceWithSelection(doc.Find("#nf5")) + + assertSelectionIs(t, sel, "#nf6") + assertLength(t, doc.Find("#nf6").Nodes, 0) + assertLength(t, doc.Find("#nf5").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestUnwrap(t *testing.T) { + doc := Doc2Clone() + + doc.Find("#nf5").Unwrap() + assertLength(t, doc.Find("#foot").Nodes, 0) + assertLength(t, doc.Find("body > #nf1").Nodes, 1) + assertLength(t, doc.Find("body > #nf5").Nodes, 1) + + printSel(t, doc.Selection) + + doc = Doc2Clone() + + doc.Find("#nf5, #n1").Unwrap() + assertLength(t, doc.Find("#foot").Nodes, 0) + assertLength(t, doc.Find("#main").Nodes, 0) + assertLength(t, doc.Find("body > #n1").Nodes, 1) + assertLength(t, doc.Find("body > #nf5").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestUnwrapBody(t *testing.T) { + doc := Doc2Clone() + + doc.Find("#main").Unwrap() + assertLength(t, doc.Find("body").Nodes, 1) + assertLength(t, doc.Find("body > #main").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestUnwrapHead(t *testing.T) { + doc := Doc2Clone() + + doc.Find("title").Unwrap() + assertLength(t, doc.Find("head").Nodes, 0) + assertLength(t, doc.Find("head > title").Nodes, 0) + assertLength(t, doc.Find("title").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestUnwrapHtml(t *testing.T) { + doc := Doc2Clone() + + doc.Find("head").Unwrap() + assertLength(t, doc.Find("html").Nodes, 0) + assertLength(t, doc.Find("html head").Nodes, 0) + assertLength(t, doc.Find("head").Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestWrap(t *testing.T) { + doc := Doc2Clone() + doc.Find("#nf1").Wrap("#nf2") + nf1 := doc.Find("#foot #nf2 #nf1") + assertLength(t, nf1.Nodes, 1) + + nf2 := doc.Find("#nf2") + assertLength(t, nf2.Nodes, 2) + + printSel(t, doc.Selection) +} + +func TestWrapEmpty(t *testing.T) { + doc := Doc2Clone() + doc.Find("#nf1").Wrap("#doesnt-exist") + + origHtml, _ := Doc2().Html() + newHtml, _ := doc.Html() + + if origHtml != newHtml { + t.Error("Expected the two documents to be identical.") + } + + printSel(t, doc.Selection) +} + +func TestWrapHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find(".odd").WrapHtml(wrapHtml) + nf2 := doc.Find("#ins #nf2") + assertLength(t, nf2.Nodes, 1) + printSel(t, doc.Selection) +} + +func TestWrapSelection(t *testing.T) { + doc := Doc2Clone() + doc.Find("#nf1").WrapSelection(doc.Find("#nf2")) + nf1 := doc.Find("#foot #nf2 #nf1") + assertLength(t, nf1.Nodes, 1) + + nf2 := doc.Find("#nf2") + assertLength(t, nf2.Nodes, 2) + + printSel(t, doc.Selection) +} + +func TestWrapAll(t *testing.T) { + doc := Doc2Clone() + doc.Find(".odd").WrapAll("#nf1") + nf1 := doc.Find("#main #nf1") + assertLength(t, nf1.Nodes, 1) + + sel := nf1.Find("#n2 ~ #n4 ~ #n6 ~ #nf2 ~ #nf4 ~ #nf6") + assertLength(t, sel.Nodes, 1) + + printSel(t, doc.Selection) +} + +func TestWrapAllHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find(".odd").WrapAllHtml(wrapHtml) + nf1 := doc.Find("#main div#ins div p em b #n2 ~ #n4 ~ #n6 ~ #nf2 ~ #nf4 ~ #nf6") + assertLength(t, 
nf1.Nodes, 1) + printSel(t, doc.Selection) +} + +func TestWrapInnerNoContent(t *testing.T) { + doc := Doc2Clone() + doc.Find(".one").WrapInner(".two") + + twos := doc.Find(".two") + assertLength(t, twos.Nodes, 4) + assertLength(t, doc.Find(".one .two").Nodes, 2) + + printSel(t, doc.Selection) +} + +func TestWrapInnerWithContent(t *testing.T) { + doc := Doc3Clone() + doc.Find(".one").WrapInner(".two") + + twos := doc.Find(".two") + assertLength(t, twos.Nodes, 4) + assertLength(t, doc.Find(".one .two").Nodes, 2) + + printSel(t, doc.Selection) +} + +func TestWrapInnerNoWrapper(t *testing.T) { + doc := Doc2Clone() + doc.Find(".one").WrapInner(".not-exist") + + twos := doc.Find(".two") + assertLength(t, twos.Nodes, 2) + assertLength(t, doc.Find(".one").Nodes, 2) + assertLength(t, doc.Find(".one .two").Nodes, 0) + + printSel(t, doc.Selection) +} + +func TestWrapInnerHtml(t *testing.T) { + doc := Doc2Clone() + doc.Find("#foot").WrapInnerHtml(wrapHtml) + + foot := doc.Find("#foot div#ins div p em b #nf1 ~ #nf2 ~ #nf3") + assertLength(t, foot.Nodes, 1) + + printSel(t, doc.Selection) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/misc/git/pre-commit b/vendor/github.com/PuerkitoBio/goquery/misc/git/pre-commit new file mode 100755 index 00000000..6a3d7984 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/misc/git/pre-commit @@ -0,0 +1,52 @@ +#!/bin/sh + +echo ">>> golint" +for dir in $(go list ./... | grep -v /vendor/) +do + golint "${dir}" +done +echo "<<< golint" +echo + +echo ">>> go vet" +go vet $(go list ./... | grep -v /vendor/) +echo "<<< go vet" +echo + +echo ">>> gosimple" +gosimple $(go list ./... | grep -v /vendor/) +echo "<<< gosimple" +echo + +echo ">>> staticcheck" +staticcheck $(go list ./... | grep -v /vendor/) +echo "<<< staticcheck" +echo + +echo ">>> unused" +unused $(go list ./... | grep -v /vendor/) +echo "<<< unused" +echo + +echo ">>> gas" +gas $(find . -name "*.go" | grep -v /vendor/ | grep -v '_test.go$') +echo "<<< gas" +echo + +# Check for gofmt problems and report if any. +gofiles=$(git diff --cached --name-only --diff-filter=ACM | grep '.go$' | grep -v /vendor/) +[ -z "$gofiles" ] && echo "EXIT $vetres" && exit $vetres + +if [ -n "$gofiles" ]; then + unformatted=$(gofmt -l $gofiles) + + if [ -n "$unformatted" ]; then + # Some files are not gofmt'd. + echo >&2 "Go files must be formatted with gofmt. Please run:" + for fn in $unformatted; do + echo >&2 " gofmt -w $PWD/$fn" + done + fi +fi +echo + diff --git a/vendor/github.com/PuerkitoBio/goquery/property.go b/vendor/github.com/PuerkitoBio/goquery/property.go new file mode 100644 index 00000000..411126db --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/property.go @@ -0,0 +1,275 @@ +package goquery + +import ( + "bytes" + "regexp" + "strings" + + "golang.org/x/net/html" +) + +var rxClassTrim = regexp.MustCompile("[\t\r\n]") + +// Attr gets the specified attribute's value for the first element in the +// Selection. To get the value for each element individually, use a looping +// construct such as Each or Map method. +func (s *Selection) Attr(attrName string) (val string, exists bool) { + if len(s.Nodes) == 0 { + return + } + return getAttributeValue(attrName, s.Nodes[0]) +} + +// AttrOr works like Attr but returns default value if attribute is not present. 
+func (s *Selection) AttrOr(attrName, defaultValue string) string { + if len(s.Nodes) == 0 { + return defaultValue + } + + val, exists := getAttributeValue(attrName, s.Nodes[0]) + if !exists { + return defaultValue + } + + return val +} + +// RemoveAttr removes the named attribute from each element in the set of matched elements. +func (s *Selection) RemoveAttr(attrName string) *Selection { + for _, n := range s.Nodes { + removeAttr(n, attrName) + } + + return s +} + +// SetAttr sets the given attribute on each element in the set of matched elements. +func (s *Selection) SetAttr(attrName, val string) *Selection { + for _, n := range s.Nodes { + attr := getAttributePtr(attrName, n) + if attr == nil { + n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val}) + } else { + attr.Val = val + } + } + + return s +} + +// Text gets the combined text contents of each element in the set of matched +// elements, including their descendants. +func (s *Selection) Text() string { + var buf bytes.Buffer + + // Slightly optimized vs calling Each: no single selection object created + var f func(*html.Node) + f = func(n *html.Node) { + if n.Type == html.TextNode { + // Keep newlines and spaces, like jQuery + buf.WriteString(n.Data) + } + if n.FirstChild != nil { + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + } + for _, n := range s.Nodes { + f(n) + } + + return buf.String() +} + +// Size is an alias for Length. +func (s *Selection) Size() int { + return s.Length() +} + +// Length returns the number of elements in the Selection object. +func (s *Selection) Length() int { + return len(s.Nodes) +} + +// Html gets the HTML contents of the first element in the set of matched +// elements. It includes text and comment nodes. +func (s *Selection) Html() (ret string, e error) { + // Since there is no .innerHtml, the HTML content must be re-created from + // the nodes using html.Render. + var buf bytes.Buffer + + if len(s.Nodes) > 0 { + for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling { + e = html.Render(&buf, c) + if e != nil { + return + } + } + ret = buf.String() + } + + return +} + +// AddClass adds the given class(es) to each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +func (s *Selection) AddClass(class ...string) *Selection { + classStr := strings.TrimSpace(strings.Join(class, " ")) + + if classStr == "" { + return s + } + + tcls := getClassesSlice(classStr) + for _, n := range s.Nodes { + curClasses, attr := getClassesAndAttr(n, true) + for _, newClass := range tcls { + if !strings.Contains(curClasses, " "+newClass+" ") { + curClasses += newClass + " " + } + } + + setClasses(n, attr, curClasses) + } + + return s +} + +// HasClass determines whether any of the matched elements are assigned the +// given class. +func (s *Selection) HasClass(class string) bool { + class = " " + class + " " + for _, n := range s.Nodes { + classes, _ := getClassesAndAttr(n, false) + if strings.Contains(classes, class) { + return true + } + } + return false +} + +// RemoveClass removes the given class(es) from each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +// If no class name is provided, all classes are removed. 
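+
+// exampleAttrUsage is an illustrative sketch (not part of the upstream
+// goquery sources): it shows how the attribute and class helpers defined in
+// this file are typically combined. The HTML snippet, selector strings and
+// variable names are invented for the illustration.
+func exampleAttrUsage() {
+    doc, err := NewDocumentFromReader(strings.NewReader(
+        `<p class="intro">Hello <a href="/x">link</a></p>`))
+    if err != nil {
+        return // nothing to demonstrate if parsing fails
+    }
+    href, ok := doc.Find("a").Attr("href")        // "/x", true
+    title := doc.Find("a").AttrOr("title", "n/a") // attribute absent, so "n/a"
+    p := doc.Find("p").SetAttr("id", "first").AddClass("lead")
+    hasIntro := p.HasClass("intro") // true
+    text := p.Text()                // "Hello link"
+    inner, _ := p.Html()            // inner HTML of the <p> element
+    _, _, _, _, _, _ = href, ok, title, hasIntro, text, inner
+}
+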
+func (s *Selection) RemoveClass(class ...string) *Selection { + var rclasses []string + + classStr := strings.TrimSpace(strings.Join(class, " ")) + remove := classStr == "" + + if !remove { + rclasses = getClassesSlice(classStr) + } + + for _, n := range s.Nodes { + if remove { + removeAttr(n, "class") + } else { + classes, attr := getClassesAndAttr(n, true) + for _, rcl := range rclasses { + classes = strings.Replace(classes, " "+rcl+" ", " ", -1) + } + + setClasses(n, attr, classes) + } + } + + return s +} + +// ToggleClass adds or removes the given class(es) for each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +func (s *Selection) ToggleClass(class ...string) *Selection { + classStr := strings.TrimSpace(strings.Join(class, " ")) + + if classStr == "" { + return s + } + + tcls := getClassesSlice(classStr) + + for _, n := range s.Nodes { + classes, attr := getClassesAndAttr(n, true) + for _, tcl := range tcls { + if strings.Contains(classes, " "+tcl+" ") { + classes = strings.Replace(classes, " "+tcl+" ", " ", -1) + } else { + classes += tcl + " " + } + } + + setClasses(n, attr, classes) + } + + return s +} + +func getAttributePtr(attrName string, n *html.Node) *html.Attribute { + if n == nil { + return nil + } + + for i, a := range n.Attr { + if a.Key == attrName { + return &n.Attr[i] + } + } + return nil +} + +// Private function to get the specified attribute's value from a node. +func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) { + if a := getAttributePtr(attrName, n); a != nil { + val = a.Val + exists = true + } + return +} + +// Get and normalize the "class" attribute from the node. +func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) { + // Applies only to element nodes + if n.Type == html.ElementNode { + attr = getAttributePtr("class", n) + if attr == nil && create { + n.Attr = append(n.Attr, html.Attribute{ + Key: "class", + Val: "", + }) + attr = &n.Attr[len(n.Attr)-1] + } + } + + if attr == nil { + classes = " " + } else { + classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ") + } + + return +} + +func getClassesSlice(classes string) []string { + return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ") +} + +func removeAttr(n *html.Node, attrName string) { + for i, a := range n.Attr { + if a.Key == attrName { + n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr = + n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1] + return + } + } +} + +func setClasses(n *html.Node, attr *html.Attribute, classes string) { + classes = strings.TrimSpace(classes) + if classes == "" { + removeAttr(n, "class") + return + } + + attr.Val = classes +} diff --git a/vendor/github.com/PuerkitoBio/goquery/property_test.go b/vendor/github.com/PuerkitoBio/goquery/property_test.go new file mode 100644 index 00000000..1095dcc8 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/property_test.go @@ -0,0 +1,252 @@ +package goquery + +import ( + "regexp" + "strings" + "testing" +) + +func TestAttrExists(t *testing.T) { + if val, ok := Doc().Find("a").Attr("href"); !ok { + t.Error("Expected a value for the href attribute.") + } else { + t.Logf("Href of first anchor: %v.", val) + } +} + +func TestAttrOr(t *testing.T) { + if val := Doc().Find("a").AttrOr("fake-attribute", "alternative"); val != "alternative" { + t.Error("Expected an alternative value for 'fake-attribute' attribute.") + } else { + t.Logf("Value returned for 
not existing attribute: %v.", val) + } + if val := Doc().Find("zz").AttrOr("fake-attribute", "alternative"); val != "alternative" { + t.Error("Expected an alternative value for 'fake-attribute' on an empty selection.") + } else { + t.Logf("Value returned for empty selection: %v.", val) + } +} + +func TestAttrNotExist(t *testing.T) { + if val, ok := Doc().Find("div.row-fluid").Attr("href"); ok { + t.Errorf("Expected no value for the href attribute, got %v.", val) + } +} + +func TestRemoveAttr(t *testing.T) { + sel := Doc2Clone().Find("div") + + sel.RemoveAttr("id") + + _, ok := sel.Attr("id") + if ok { + t.Error("Expected there to be no id attributes set") + } +} + +func TestSetAttr(t *testing.T) { + sel := Doc2Clone().Find("#main") + + sel.SetAttr("id", "not-main") + + val, ok := sel.Attr("id") + if !ok { + t.Error("Expected an id attribute on main") + } + + if val != "not-main" { + t.Errorf("Expected an attribute id to be not-main, got %s", val) + } +} + +func TestSetAttr2(t *testing.T) { + sel := Doc2Clone().Find("#main") + + sel.SetAttr("foo", "bar") + + val, ok := sel.Attr("foo") + if !ok { + t.Error("Expected an 'foo' attribute on main") + } + + if val != "bar" { + t.Errorf("Expected an attribute 'foo' to be 'bar', got '%s'", val) + } +} + +func TestText(t *testing.T) { + txt := Doc().Find("h1").Text() + if strings.Trim(txt, " \n\r\t") != "Provok.in" { + t.Errorf("Expected text to be Provok.in, found %s.", txt) + } +} + +func TestText2(t *testing.T) { + txt := Doc().Find(".hero-unit .container-fluid .row-fluid:nth-child(1)").Text() + if ok, e := regexp.MatchString(`^\s+Provok\.in\s+Prove your point.\s+$`, txt); !ok || e != nil { + t.Errorf("Expected text to be Provok.in Prove your point., found %s.", txt) + if e != nil { + t.Logf("Error: %s.", e.Error()) + } + } +} + +func TestText3(t *testing.T) { + txt := Doc().Find(".pvk-gutter").First().Text() + // There's an   character in there... + if ok, e := regexp.MatchString(`^[\s\x{00A0}]+$`, txt); !ok || e != nil { + t.Errorf("Expected spaces, found <%v>.", txt) + if e != nil { + t.Logf("Error: %s.", e.Error()) + } + } +} + +func TestHtml(t *testing.T) { + txt, e := Doc().Find("h1").Html() + if e != nil { + t.Errorf("Error: %s.", e) + } + + if ok, e := regexp.MatchString(`^\s*Provok\.in\s*$`, txt); !ok || e != nil { + t.Errorf("Unexpected HTML content, found %s.", txt) + if e != nil { + t.Logf("Error: %s.", e.Error()) + } + } +} + +func TestNbsp(t *testing.T) { + src := `

<p>Some&nbsp;text</p>

` + d, err := NewDocumentFromReader(strings.NewReader(src)) + if err != nil { + t.Fatal(err) + } + txt := d.Find("p").Text() + ix := strings.Index(txt, "\u00a0") + if ix != 4 { + t.Errorf("Text: expected a non-breaking space at index 4, got %d", ix) + } + + h, err := d.Find("p").Html() + if err != nil { + t.Fatal(err) + } + ix = strings.Index(h, "\u00a0") + if ix != 4 { + t.Errorf("Html: expected a non-breaking space at index 4, got %d", ix) + } +} + +func TestAddClass(t *testing.T) { + sel := Doc2Clone().Find("#main") + sel.AddClass("main main main") + + // Make sure that class was only added once + if a, ok := sel.Attr("class"); !ok || a != "main" { + t.Error("Expected #main to have class main") + } +} + +func TestAddClassSimilar(t *testing.T) { + sel := Doc2Clone().Find("#nf5") + sel.AddClass("odd") + + assertClass(t, sel, "odd") + assertClass(t, sel, "odder") + printSel(t, sel.Parent()) +} + +func TestAddEmptyClass(t *testing.T) { + sel := Doc2Clone().Find("#main") + sel.AddClass("") + + // Make sure that class was only added once + if a, ok := sel.Attr("class"); ok { + t.Errorf("Expected #main to not to have a class, have: %s", a) + } +} + +func TestAddClasses(t *testing.T) { + sel := Doc2Clone().Find("#main") + sel.AddClass("a b") + + // Make sure that class was only added once + if !sel.HasClass("a") || !sel.HasClass("b") { + t.Errorf("#main does not have classes") + } +} + +func TestHasClass(t *testing.T) { + sel := Doc().Find("div") + if !sel.HasClass("span12") { + t.Error("Expected at least one div to have class span12.") + } +} + +func TestHasClassNone(t *testing.T) { + sel := Doc().Find("h2") + if sel.HasClass("toto") { + t.Error("Expected h1 to have no class.") + } +} + +func TestHasClassNotFirst(t *testing.T) { + sel := Doc().Find(".alert") + if !sel.HasClass("alert-error") { + t.Error("Expected .alert to also have class .alert-error.") + } +} + +func TestRemoveClass(t *testing.T) { + sel := Doc2Clone().Find("#nf1") + sel.RemoveClass("one row") + + if !sel.HasClass("even") || sel.HasClass("one") || sel.HasClass("row") { + classes, _ := sel.Attr("class") + t.Error("Expected #nf1 to have class even, has ", classes) + } +} + +func TestRemoveClassSimilar(t *testing.T) { + sel := Doc2Clone().Find("#nf5, #nf6") + assertLength(t, sel.Nodes, 2) + + sel.RemoveClass("odd") + assertClass(t, sel.Eq(0), "odder") + printSel(t, sel) +} + +func TestRemoveAllClasses(t *testing.T) { + sel := Doc2Clone().Find("#nf1") + sel.RemoveClass() + + if a, ok := sel.Attr("class"); ok { + t.Error("All classes were not removed, has ", a) + } + + sel = Doc2Clone().Find("#main") + sel.RemoveClass() + if a, ok := sel.Attr("class"); ok { + t.Error("All classes were not removed, has ", a) + } +} + +func TestToggleClass(t *testing.T) { + sel := Doc2Clone().Find("#nf1") + + sel.ToggleClass("one") + if sel.HasClass("one") { + t.Error("Expected #nf1 to not have class one") + } + + sel.ToggleClass("one") + if !sel.HasClass("one") { + t.Error("Expected #nf1 to have class one") + } + + sel.ToggleClass("one even row") + if a, ok := sel.Attr("class"); ok { + t.Errorf("Expected #nf1 to have no classes, have %q", a) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/query.go b/vendor/github.com/PuerkitoBio/goquery/query.go new file mode 100644 index 00000000..1a7f8732 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/query.go @@ -0,0 +1,53 @@ +package goquery + +import "golang.org/x/net/html" + +// Is checks the current matched set of elements against a selector and +// returns true if at least one of these 
elements matches. +func (s *Selection) Is(selector string) bool { + if len(s.Nodes) > 0 { + return s.IsMatcher(compileMatcher(selector)) + } + + return false +} + +// IsMatcher checks the current matched set of elements against a matcher and +// returns true if at least one of these elements matches. +func (s *Selection) IsMatcher(m Matcher) bool { + if len(s.Nodes) > 0 { + if len(s.Nodes) == 1 { + return m.Match(s.Nodes[0]) + } + return len(m.Filter(s.Nodes)) > 0 + } + + return false +} + +// IsFunction checks the current matched set of elements against a predicate and +// returns true if at least one of these elements matches. +func (s *Selection) IsFunction(f func(int, *Selection) bool) bool { + return s.FilterFunction(f).Length() > 0 +} + +// IsSelection checks the current matched set of elements against a Selection object +// and returns true if at least one of these elements matches. +func (s *Selection) IsSelection(sel *Selection) bool { + return s.FilterSelection(sel).Length() > 0 +} + +// IsNodes checks the current matched set of elements against the specified nodes +// and returns true if at least one of these elements matches. +func (s *Selection) IsNodes(nodes ...*html.Node) bool { + return s.FilterNodes(nodes...).Length() > 0 +} + +// Contains returns true if the specified Node is within, +// at any depth, one of the nodes in the Selection object. +// It is NOT inclusive, to behave like jQuery's implementation, and +// unlike Javascript's .contains, so if the contained +// node is itself in the selection, it returns false. +func (s *Selection) Contains(n *html.Node) bool { + return sliceContains(s.Nodes, n) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/query_test.go b/vendor/github.com/PuerkitoBio/goquery/query_test.go new file mode 100644 index 00000000..54b2a2e0 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/query_test.go @@ -0,0 +1,103 @@ +package goquery + +import ( + "testing" +) + +func TestIs(t *testing.T) { + sel := Doc().Find(".footer p:nth-child(1)") + if !sel.Is("p") { + t.Error("Expected .footer p:nth-child(1) to be p.") + } +} + +func TestIsInvalid(t *testing.T) { + sel := Doc().Find(".footer p:nth-child(1)") + if sel.Is("") { + t.Error("Is should not succeed with invalid selector string") + } +} + +func TestIsPositional(t *testing.T) { + sel := Doc().Find(".footer p:nth-child(2)") + if !sel.Is("p:nth-child(2)") { + t.Error("Expected .footer p:nth-child(2) to be p:nth-child(2).") + } +} + +func TestIsPositionalNot(t *testing.T) { + sel := Doc().Find(".footer p:nth-child(1)") + if sel.Is("p:nth-child(2)") { + t.Error("Expected .footer p:nth-child(1) NOT to be p:nth-child(2).") + } +} + +func TestIsFunction(t *testing.T) { + ok := Doc().Find("div").IsFunction(func(i int, s *Selection) bool { + return s.HasClass("container-fluid") + }) + + if !ok { + t.Error("Expected some div to have a container-fluid class.") + } +} + +func TestIsFunctionRollback(t *testing.T) { + ok := Doc().Find("div").IsFunction(func(i int, s *Selection) bool { + return s.HasClass("container-fluid") + }) + + if !ok { + t.Error("Expected some div to have a container-fluid class.") + } +} + +func TestIsSelection(t *testing.T) { + sel := Doc().Find("div") + sel2 := Doc().Find(".pvk-gutter") + + if !sel.IsSelection(sel2) { + t.Error("Expected some div to have a pvk-gutter class.") + } +} + +func TestIsSelectionNot(t *testing.T) { + sel := Doc().Find("div") + sel2 := Doc().Find("a") + + if sel.IsSelection(sel2) { + t.Error("Expected some div NOT to be an anchor.") + } +} + +func 
TestIsNodes(t *testing.T) { + sel := Doc().Find("div") + sel2 := Doc().Find(".footer") + + if !sel.IsNodes(sel2.Nodes[0]) { + t.Error("Expected some div to have a footer class.") + } +} + +func TestDocContains(t *testing.T) { + sel := Doc().Find("h1") + if !Doc().Contains(sel.Nodes[0]) { + t.Error("Expected document to contain H1 tag.") + } +} + +func TestSelContains(t *testing.T) { + sel := Doc().Find(".row-fluid") + sel2 := Doc().Find("a[ng-click]") + if !sel.Contains(sel2.Nodes[0]) { + t.Error("Expected .row-fluid to contain a[ng-click] tag.") + } +} + +func TestSelNotContains(t *testing.T) { + sel := Doc().Find("a.link") + sel2 := Doc().Find("span") + if sel.Contains(sel2.Nodes[0]) { + t.Error("Expected a.link to NOT contain span tag.") + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html b/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html new file mode 100644 index 00000000..ba5348fd --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html @@ -0,0 +1,855 @@ + + + + + + testing - The Go Programming Language + + + + + + + + + + + + +
+ + +
+

Package testing

+ + + + + + + + + + +
+
+
import "testing"
+
+
+
Overview
+
Index
+ + +
Subdirectories
+ +
+
+ +
+ +
+

Overview ▾

+

+Package testing provides support for automated testing of Go packages. +It is intended to be used in concert with the “go test” command, which automates +execution of any function of the form +

+
func TestXxx(*testing.T)
+
+

+where Xxx can be any alphanumeric string (but the first letter must not be in +[a-z]) and serves to identify the test routine. +These TestXxx routines should be declared within the package they are testing. +

+

+Functions of the form +

+
func BenchmarkXxx(*testing.B)
+
+

+are considered benchmarks, and are executed by the "go test" command when +the -test.bench flag is provided. +

+

+A sample benchmark function looks like this: +

+
func BenchmarkHello(b *testing.B) {
+    for i := 0; i < b.N; i++ {
+        fmt.Sprintf("hello")
+    }
+}
+
+

+The benchmark package will vary b.N until the benchmark function lasts +long enough to be timed reliably. The output +

+
testing.BenchmarkHello    10000000    282 ns/op
+
+

+means that the loop ran 10000000 times at a speed of 282 ns per loop. +

+

+If a benchmark needs some expensive setup before running, the timer +may be stopped: +

+
func BenchmarkBigLen(b *testing.B) {
+    b.StopTimer()
+    big := NewBig()
+    b.StartTimer()
+    for i := 0; i < b.N; i++ {
+        big.Len()
+    }
+}
+
+

+The package also runs and verifies example code. Example functions may +include a concluding comment that begins with "Output:" and is compared with +the standard output of the function when the tests are run, as in these +examples of an example: +

+
func ExampleHello() {
+        fmt.Println("hello")
+        // Output: hello
+}
+
+func ExampleSalutations() {
+        fmt.Println("hello, and")
+        fmt.Println("goodbye")
+        // Output:
+        // hello, and
+        // goodbye
+}
+
+

+Example functions without output comments are compiled but not executed. +

+

+The naming convention to declare examples for a function F, a type T and +method M on type T are: +

+
func ExampleF() { ... }
+func ExampleT() { ... }
+func ExampleT_M() { ... }
+
+

+Multiple example functions for a type/function/method may be provided by +appending a distinct suffix to the name. The suffix must start with a +lower-case letter. +

+
func ExampleF_suffix() { ... }
+func ExampleT_suffix() { ... }
+func ExampleT_M_suffix() { ... }
+
+

+The entire test file is presented as the example when it contains a single +example function, at least one other function, type, variable, or constant +declaration, and no test or benchmark functions. +
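+A minimal sketch of such a whole-file example (the package and function names
+here are invented for illustration) would be a file holding one example
+function plus a helper, and no test or benchmark functions:
+
+package parity_test
+
+import "fmt"
+
+func parity(n int) string {
+    if n%2 == 0 {
+        return "even"
+    }
+    return "odd"
+}
+
+func Example() {
+    fmt.Println(parity(3))
+    // Output: odd
+}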

+ +
+
+ + +

Index

+ +
+
+ + + + +
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample)
+ + +
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark)
+ + +
func RunExamples(matchString func(pat, str string) (bool, error), examples []InternalExample) (ok bool)
+ + +
func RunTests(matchString func(pat, str string) (bool, error), tests []InternalTest) (ok bool)
+ + +
func Short() bool
+ + + +
type B
+ + + +
    func (c *B) Error(args ...interface{})
+ + +
    func (c *B) Errorf(format string, args ...interface{})
+ + +
    func (c *B) Fail()
+ + +
    func (c *B) FailNow()
+ + +
    func (c *B) Failed() bool
+ + +
    func (c *B) Fatal(args ...interface{})
+ + +
    func (c *B) Fatalf(format string, args ...interface{})
+ + +
    func (c *B) Log(args ...interface{})
+ + +
    func (c *B) Logf(format string, args ...interface{})
+ + +
    func (b *B) ResetTimer()
+ + +
    func (b *B) SetBytes(n int64)
+ + +
    func (b *B) StartTimer()
+ + +
    func (b *B) StopTimer()
+ + + +
type BenchmarkResult
+ + +
    func Benchmark(f func(b *B)) BenchmarkResult
+ + + +
    func (r BenchmarkResult) NsPerOp() int64
+ + +
    func (r BenchmarkResult) String() string
+ + + +
type InternalBenchmark
+ + + + +
type InternalExample
+ + + + +
type InternalTest
+ + + + +
type T
+ + + +
    func (c *T) Error(args ...interface{})
+ + +
    func (c *T) Errorf(format string, args ...interface{})
+ + +
    func (c *T) Fail()
+ + +
    func (c *T) FailNow()
+ + +
    func (c *T) Failed() bool
+ + +
    func (c *T) Fatal(args ...interface{})
+ + +
    func (c *T) Fatalf(format string, args ...interface{})
+ + +
    func (c *T) Log(args ...interface{})
+ + +
    func (c *T) Logf(format string, args ...interface{})
+ + +
    func (t *T) Parallel()
+ + + +
+ + + + +

Package files

+

+ + + benchmark.go + + example.go + + testing.go + + +

+ + + + + + + +

func Main

+
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample)
+

+An internal function but exported because it is cross-package; part of the implementation +of the "go test" command. +

+ + + + + +

func RunBenchmarks

+
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark)
+

+An internal function but exported because it is cross-package; part of the implementation +of the "go test" command. +

+ + + + + +

func RunExamples

+
func RunExamples(matchString func(pat, str string) (bool, error), examples []InternalExample) (ok bool)
+ + + + + +

func RunTests

+
func RunTests(matchString func(pat, str string) (bool, error), tests []InternalTest) (ok bool)
+ + + + + +

func Short

+
func Short() bool
+

+Short reports whether the -test.short flag is set. +

+ + + + + + +

type B

+
type B struct {
+    N int
+    // contains filtered or unexported fields
+}
+

+B is a type passed to Benchmark functions to manage benchmark +timing and to specify the number of iterations to run. +

+ + + + + + + + + + + + +

func (*B) Error

+
func (c *B) Error(args ...interface{})
+

+Error is equivalent to Log() followed by Fail(). +

+ + + + + +

func (*B) Errorf

+
func (c *B) Errorf(format string, args ...interface{})
+

+Errorf is equivalent to Logf() followed by Fail(). +

+ + + + + +

func (*B) Fail

+
func (c *B) Fail()
+

+Fail marks the function as having failed but continues execution. +

+ + + + + +

func (*B) FailNow

+
func (c *B) FailNow()
+

+FailNow marks the function as having failed and stops its execution. +Execution will continue at the next test or benchmark. +

+ + + + + +

func (*B) Failed

+
func (c *B) Failed() bool
+

+Failed returns whether the function has failed. +

+ + + + + +

func (*B) Fatal

+
func (c *B) Fatal(args ...interface{})
+

+Fatal is equivalent to Log() followed by FailNow(). +

+ + + + + +

func (*B) Fatalf

+
func (c *B) Fatalf(format string, args ...interface{})
+

+Fatalf is equivalent to Logf() followed by FailNow(). +

+ + + + + +

func (*B) Log

+
func (c *B) Log(args ...interface{})
+

+Log formats its arguments using default formatting, analogous to Println(), +and records the text in the error log. +

+ + + + + +

func (*B) Logf

+
func (c *B) Logf(format string, args ...interface{})
+

+Logf formats its arguments according to the format, analogous to Printf(), +and records the text in the error log. +

+ + + + + +

func (*B) ResetTimer

+
func (b *B) ResetTimer()
+

+ResetTimer sets the elapsed benchmark time to zero. +It does not affect whether the timer is running. +

+ + + + + +

func (*B) SetBytes

+
func (b *B) SetBytes(n int64)
+

+SetBytes records the number of bytes processed in a single operation. +If this is called, the benchmark will report ns/op and MB/s. +

+ + + + + +

func (*B) StartTimer

+
func (b *B) StartTimer()
+

+StartTimer starts timing a test. This function is called automatically +before a benchmark starts, but it can also used to resume timing after +a call to StopTimer. +

+ + + + + +

func (*B) StopTimer

+
func (b *B) StopTimer()
+

+StopTimer stops timing a test. This can be used to pause the timer +while performing complex initialization that you don't +want to measure. +

+ + + + + + + +

type BenchmarkResult

+
type BenchmarkResult struct {
+    N     int           // The number of iterations.
+    T     time.Duration // The total time taken.
+    Bytes int64         // Bytes processed in one iteration.
+}
+

+The results of a benchmark run. +

+ + + + + + + + + + +

func Benchmark

+
func Benchmark(f func(b *B)) BenchmarkResult
+

+Benchmark benchmarks a single function. Useful for creating +custom benchmarks that do not use the "go test" command. +

+ + + + + + +

func (BenchmarkResult) NsPerOp

+
func (r BenchmarkResult) NsPerOp() int64
+ + + + + +

func (BenchmarkResult) String

+
func (r BenchmarkResult) String() string
+ + + + + + + +

type InternalBenchmark

+
type InternalBenchmark struct {
+    Name string
+    F    func(b *B)
+}
+

+An internal type but exported because it is cross-package; part of the implementation +of the "go test" command. +

+ + + + + + + + + + + + + + +

type InternalExample

+
type InternalExample struct {
+    Name   string
+    F      func()
+    Output string
+}
+ + + + + + + + + + + + + + +

type InternalTest

+
type InternalTest struct {
+    Name string
+    F    func(*T)
+}
+

+An internal type but exported because it is cross-package; part of the implementation +of the "go test" command. +

+ + + + + + + + + + + + + + +

type T

+
type T struct {
+    // contains filtered or unexported fields
+}
+

+T is a type passed to Test functions to manage test state and support formatted test logs. +Logs are accumulated during execution and dumped to standard error when done. +

+ + + + + + + + + + + + +

func (*T) Error

+
func (c *T) Error(args ...interface{})
+

+Error is equivalent to Log() followed by Fail(). +

+ + + + + +

func (*T) Errorf

+
func (c *T) Errorf(format string, args ...interface{})
+

+Errorf is equivalent to Logf() followed by Fail(). +

+ + + + + +

func (*T) Fail

+
func (c *T) Fail()
+

+Fail marks the function as having failed but continues execution. +

+ + + + + +

func (*T) FailNow

+
func (c *T) FailNow()
+

+FailNow marks the function as having failed and stops its execution. +Execution will continue at the next test or benchmark. +

+ + + + + +

func (*T) Failed

+
func (c *T) Failed() bool
+

+Failed returns whether the function has failed. +

+ + + + + +

func (*T) Fatal

+
func (c *T) Fatal(args ...interface{})
+

+Fatal is equivalent to Log() followed by FailNow(). +

+ + + + + +

func (*T) Fatalf

+
func (c *T) Fatalf(format string, args ...interface{})
+

+Fatalf is equivalent to Logf() followed by FailNow(). +

+ + + + + +

func (*T) Log

+
func (c *T) Log(args ...interface{})
+

+Log formats its arguments using default formatting, analogous to Println(), +and records the text in the error log. +

+ + + + + +

func (*T) Logf

+
func (c *T) Logf(format string, args ...interface{})
+

+Logf formats its arguments according to the format, analogous to Printf(), +and records the text in the error log. +

+ + + + + +

func (*T) Parallel

+
func (t *T) Parallel()
+

+Parallel signals that this test is to be run in parallel with (and only with) +other parallel tests in this CPU group. +

+ + + + + +
+ + + + + + + + + + + + +

Subdirectories

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Name    Synopsis
..
iotest    Package iotest implements Readers and Writers useful mainly for testing.
quick    Package quick implements utility functions to help with black box testing.
+ + + + +
+ + + + + + + + diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/gowiki.html b/vendor/github.com/PuerkitoBio/goquery/testdata/gowiki.html new file mode 100644 index 00000000..2ed6bb71 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/gowiki.html @@ -0,0 +1,1214 @@ + + + +Go (programming language) - Wikipedia, the free encyclopedia + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+ + + +
+ + +

Go (programming language)

+ + +
+ +
From Wikipedia, the free encyclopedia
+ + +
  (Redirected from Golang)
+ + +
+ Jump to: navigation, search +
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Go
Golang.png
Paradigm(s)compiled, concurrent, imperative, structured
Appeared in2009
Designed byRobert Griesemer
+Rob Pike
+Ken Thompson
DeveloperGoogle Inc.
Stable releaseversion 1.0.2[1] (14 June 2012; 2 months ago (2012-06-14))
Typing disciplinestrong, static
Major implementationsgc (8g, 6g, 5g), gccgo
Influenced byC, Limbo, Modula, Newsqueak, Oberon, Pascal,[2] Python
OSLinux, Mac OS X, FreeBSD, OpenBSD, MS Windows, Plan 9[3]
LicenseBSD-style[4] + Patent grant[5]
Usual filename extensions.go
Websitegolang.org
+

Go is a compiled, garbage-collected, concurrent programming language developed by Google Inc.[6]

+

The initial design of Go was started in September 2007 by Robert Griesemer, Rob Pike, and Ken Thompson.[2] Go was officially announced in November 2009. In May 2010, Rob Pike publicly stated that Go was being used "for real stuff" at Google.[7] Go's "gc" compiler targets the Linux, Mac OS X, FreeBSD, OpenBSD, Plan 9, and Microsoft Windows operating systems and the i386, amd64, and ARM processor architectures.[8]

+ + + + +
+
+

Contents

+
+ +
+

[edit] Goals

+

Go aims to provide the efficiency of a statically typed compiled language with the ease of programming of a dynamic language.[9] Other goals include:

+
    +
  • Safety: Type-safe and memory-safe.
  • +
  • Good support for concurrency and communication.
  • +
  • Efficient, latency-free garbage collection.
  • +
  • High-speed compilation.
  • +
+

[edit] Description

+

The syntax of Go is broadly similar to that of C: blocks of code are surrounded with curly braces; common control flow structures include for, switch, and if. Unlike C, line-ending semicolons are optional, variable declarations are written differently and are usually optional, type conversions must be made explicit, and new go and select control keywords have been introduced to support concurrent programming. New built-in types include maps, Unicode strings, array slices, and channels for inter-thread communication.
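+A short sketch (invented for illustration, not taken from the article) that
+touches these constructs:
+
+package main
+
+import "fmt"
+
+func main() {
+        counts := map[string]int{"go": 1} // built-in map
+        langs := []string{"C", "Go"}      // array slice
+        ch := make(chan string)           // channel
+
+        go func() { ch <- langs[1] }()    // goroutine started with "go"
+
+        for i := 0; i < len(langs); i++ { // C-like for loop, no line-ending semicolons
+                counts[langs[i]]++
+        }
+
+        select {                          // "select" waits on channel operations
+        case name := <-ch:
+                fmt.Println(name, counts)
+        }
+}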

+

Go is designed for exceptionally fast compiling times, even on modest hardware.[10] The language requires garbage collection. Certain concurrency-related structural conventions of Go (channels and alternative channel inputs) are borrowed from Tony Hoare's CSP. Unlike previous concurrent programming languages such as occam or Limbo, Go does not provide any built-in notion of safe or verifiable concurrency.[11]

+

Of features found in C++ or Java, Go does not include type inheritance, generic programming, assertions, method overloading, or pointer arithmetic.[2] Of these, the Go authors express an openness to generic programming, explicitly argue against assertions and pointer arithmetic, while defending the choice to omit type inheritance as giving a more useful language, encouraging heavy use of interfaces instead.[2] Initially, the language did not include exception handling, but in March 2010 a mechanism known as panic/recover was implemented to handle exceptional errors while avoiding some of the problems the Go authors find with exceptions.[12][13]

+

[edit] Type system

+

Go allows a programmer to write functions that can operate on inputs of arbitrary type, provided that the type implements the functions defined by a given interface.

+

Unlike Java, the interfaces a type supports do not need to be specified at the point at which the type is defined, and Go interfaces do not participate in a type hierarchy. A Go interface is best described as a set of methods, each identified by a name and signature. A type is considered to implement an interface if all the required methods have been defined for that type. An interface can be declared to "embed" other interfaces, meaning the declared interface includes the methods defined in the other interfaces.[11]

+

Unlike Java, the in-memory representation of an object does not contain a pointer to a virtual method table. Instead a value of interface type is implemented as a pair of a pointer to the object, and a pointer to a dictionary containing implementations of the interface methods for that type.

+

Consider the following example:

+
+
+
+type Sequence []int
+ 
+func (s Sequence) Len() int {
+    return len(s)
+}
+ 
+type HasLength interface {
+    Len() int
+}
+ 
+func Foo (o HasLength) {
+    ...
+}
+
+
+

These four definitions could have been placed in separate files, in different parts of the program. Notably, the programmer who defined the Sequence type did not need to declare that the type implemented HasLength, and the person who implemented the Len method for Sequence did not need to specify that this method was part of HasLength.

+

[edit] Name visibility

+

Visibility of structures, structure fields, variables, constants, methods, top-level types and functions outside their defining package is defined implicitly according to the capitalization of their identifier.[14]
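+For instance (an invented sketch, not taken from the article), a capitalized
+identifier is exported while a lower-case one stays package-private:
+
+package geometry
+
+// Area starts with a capital letter, so other packages may call it.
+func Area(w, h float64) float64 { return w * h }
+
+// scale starts with a lower-case letter, so it is visible only inside geometry.
+func scale(x float64) float64 { return x * 2 }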

+

[edit] Concurrency

+

Go provides goroutines, small lightweight threads; the name alludes to coroutines. Goroutines are created with the go statement from anonymous or named functions.

+

Goroutines are executed in parallel with other goroutines, including their caller. They do not necessarily run in separate threads, but a group of goroutines are multiplexed onto multiple threads — execution control is moved between them by blocking them when sending or receiving messages over channels.
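+A small sketch (invented for illustration, not taken from the article) of
+goroutines communicating over a channel:
+
+package main
+
+import "fmt"
+
+func main() {
+        results := make(chan int)
+
+        for i := 1; i <= 2; i++ {
+                go func(n int) { results <- n * n }(i) // each goroutine sends one value
+        }
+
+        // Receiving blocks until a goroutine sends, multiplexing execution as described above.
+        fmt.Println(<-results, <-results)
+}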

+

[edit] Implementations

+

There are currently two Go compilers:

+
    +
  • 6g/8g/5g (the compilers for AMD64, x86, and ARM respectively) with their supporting tools (collectively known as "gc") based on Ken's previous work on Plan 9's C toolchain.
  • +
  • gccgo, a GCC frontend written in C++,[15] and now officially supported as of version 4.6, albeit not part of the standard binary for gcc.[16]
  • +
+

Both compilers work on Unix-like systems, and a port to Microsoft Windows of the gc compiler and runtime have been integrated in the main distribution. Most of the standard libraries also work on Windows.

+

There is also an unmaintained "tiny" runtime environment that allows Go programs to run on bare hardware.[17]

+

[edit] Examples

+

[edit] Hello world

+

The following is a Hello world program in Go:

+
+
+
+package main
+ 
+import "fmt"
+ 
+func main() {
+        fmt.Println("Hello, World")
+}
+
+
+

Go's automatic semicolon insertion feature requires that opening braces not be placed on their own lines, and this is thus the preferred brace style; the examples shown comply with this style.[18]

+

[edit] Echo

+

Example illustrating how to write a program like the Unix echo command in Go:[19]

+
+
+
+package main
+ 
+import (
+        "os"
+        "flag"  // command line option parser
+)
+ 
+var omitNewline = flag.Bool("n", false, "don't print final newline")
+ 
+const (
+        Space = " "
+        Newline = "\n"
+)
+ 
+func main() {
+        flag.Parse()   // Scans the arg list and sets up flags
+        var s string
+        for i := 0; i < flag.NArg(); i++ {
+                if i > 0 {
+                        s += Space
+                }
+                s += flag.Arg(i)
+        }
+        if !*omitNewline {
+                s += Newline
+        }
+        os.Stdout.WriteString(s)
+}
+
+
+

[edit] Reception

+

Go's initial release led to much discussion.

+

Michele Simionato wrote in an article for artima.com:[20]

+
+
Here I just wanted to point out the design choices about interfaces and inheritance. Such ideas are not new and it is a shame that no popular language has followed such particular route in the design space. I hope Go will become popular; if not, I hope such ideas will finally enter in a popular language, we are already 10 or 20 years too late :-(
+
+

Dave Astels at Engine Yard wrote:[21]

+
+
Go is extremely easy to dive into. There are a minimal number of fundamental language concepts and the syntax is clean and designed to be clear and unambiguous. Go is still experimental and still a little rough around the edges.
+
+

Ars Technica interviewed Rob Pike, one of the authors of Go, and asked why a new language was needed. He replied that:[22]

+
+
It wasn't enough to just add features to existing programming languages, because sometimes you can get more in the long run by taking things away. They wanted to start from scratch and rethink everything. ... [But they did not want] to deviate too much from what developers already knew because they wanted to avoid alienating Go's target audience.
+
+

Go was in 15th place on the TIOBE Programming Community Index of programming language popularity in its first year, 2009,[citation needed] surpassing established languages like Pascal. As of March 2012, it ranked 66th in the index.[23]

+

Bruce Eckel stated:[24]

+
+
The complexity of C++ (even more complexity has been added in the new C++), and the resulting impact on productivity, is no longer justified. All the hoops that the C++ programmer had to jump through in order to use a C-compatible language make no sense anymore -- they're just a waste of time and effort. Now, Go makes much more sense for the class of problems that C++ was originally intended to solve.
+
+

[edit] Naming dispute

+

On the day of the general release of the language, Francis McCabe, developer of the Go! programming language (note the exclamation point), requested a name change of Google's language to prevent confusion with his language.[25] The issue was closed by a Google developer on 12 October 2010 with the custom status "Unfortunate", with a comment that "there are many computing products and services named Go. In the 11 months since our release, there has been minimal confusion of the two languages."[26]

+

[edit] See also

+ +

[edit] References

+ +
+
    +
  1. ^ "golang-announce: go1.0.2 released". https://groups.google.com/forum/#!msg/golang-announce/9-f_fnXNDzw/MiM3tk0iyjYJ. Retrieved 14 June 2012. 
  2. +
  3. ^ a b c d "Language Design FAQ". golang.org. 16 January 2010. http://golang.org/doc/go_faq.html. Retrieved 27 February 2010. 
  4. +
  5. ^ "Go Porting Efforts". Go Language Resources. cat-v. 12 January 2010. http://go-lang.cat-v.org/os-ports. Retrieved 18 January 2010. 
  6. +
  7. ^ "Text file LICENSE". http://golang.org/LICENSE. Retrieved 27 January 2011. 
  8. +
  9. ^ "Additional IP Rights Grant". http://code.google.com/p/go/source/browse/PATENTS. Retrieved 26 July 2012. 
  10. +
  11. ^ Kincaid, Jason (10 November 2009). "Google’s Go: A New Programming Language That’s Python Meets C++". TechCrunch. http://www.techcrunch.com/2009/11/10/google-go-language/. Retrieved 18 January 2010. 
  12. +
  13. ^ Metz, Cade (20 May 2010). "Google programming Frankenstein is a Go". The Register. http://www.theregister.co.uk/2010/05/20/go_in_production_at_google/. 
  14. +
  15. ^ "Installing Go". golang.org. The Go Authors. 11 June 2010. http://golang.org/doc/install.html#tmp_33. Retrieved 11 June 2010. 
  16. +
  17. ^ Pike, Rob. "The Go Programming Language". YouTube. http://www.youtube.com/watch?v=rKnDgT73v8s&feature=related. Retrieved 1 Jul 2011. 
  18. +
  19. ^ Rob Pike (10 November 2009) (flv). The Go Programming Language (Tech talk). Google. Event occurs at 8:53. http://www.youtube.com/watch?v=rKnDgT73v8s#t=8m53. 
  20. +
  21. ^ a b "The Go Memory Model". Google. http://golang.org/doc/go_mem.html. Retrieved 5 January 2011. 
  22. +
  23. ^ Release notes, 30 March 2010
  24. +
  25. ^ "Proposal for an exception-like mechanism". golang-nuts. 25 March 2010. http://groups.google.com/group/golang-nuts/browse_thread/thread/1ce5cd050bb973e4. Retrieved 25 March 2010. 
  26. +
  27. ^ "A Tutorial for the Go Programming Language". The Go Programming Language. Google. http://golang.org/doc/go_tutorial.html. Retrieved 10 March 2010. "In Go the rule about visibility of information is simple: if a name (of a top-level type, function, method, constant or variable, or of a structure field or method) is capitalized, users of the package may see it. Otherwise, the name and hence the thing being named is visible only inside the package in which it is declared." 
  28. +
  29. ^ "FAQ: Implementation". golang.org. 16 January 2010. http://golang.org/doc/go_faq.html#Implementation. Retrieved 18 January 2010. 
  30. +
  31. ^ "Installing GCC: Configuration". http://gcc.gnu.org/install/configure.html. Retrieved 3 December 2011. "Ada, Go and Objective-C++ are not default languages" 
  32. +
  33. ^ Gerrand, Andrew (1 February 2011). "release.2011-02-01". golang-nuts. Google. http://groups.google.com/group/golang-nuts/browse_thread/thread/b877e34723b543a7. Retrieved 5 February 2011. 
  34. +
  35. ^ "A Tutorial for the Go Programming Language". The Go Programming Language. Google. http://golang.org/doc/go_tutorial.html. Retrieved 10 March 2010. "The one surprise is that it's important to put the opening brace of a construct such as an if statement on the same line as the if; however, if you don't, there are situations that may not compile or may give the wrong result. The language forces the brace style to some extent." 
  36. +
  37. ^ "A Tutorial for the Go Programming Language". golang.org. 16 January 2010. http://golang.org/doc/go_tutorial.html. Retrieved 18 January 2010. 
  38. +
  39. ^ Simionato, Michele (15 November 2009). "Interfaces vs Inheritance (or, watch out for Go!)". artima. http://www.artima.com/weblogs/viewpost.jsp?thread=274019. Retrieved 15 November 2009. 
  40. +
  41. ^ Astels, Dave (9 November 2009). "Ready, Set, Go!". engineyard. http://www.engineyard.com/blog/2009/ready-set-go/. Retrieved 9 November 2009. 
  42. +
  43. ^ Paul, Ryan (10 November 2009). "Go: new open source programming language from Google". Ars Technica. http://arstechnica.com/open-source/news/2009/11/go-new-open-source-programming-language-from-google.ars. Retrieved 13 November 2009. 
  44. +
  45. ^ "TIOBE Programming Community Index for March 2012". TIOBE Software. March 2012. http://es.scribd.com/doc/89569304/TIOBE-Programming-Community-Index-for-March-2012. Retrieved 28 April 2012. 
  46. +
  47. ^ Bruce Eckel (27). "Calling Go from Python via JSON-RPC". http://www.artima.com/weblogs/viewpost.jsp?thread=333589. Retrieved 29 August 2011. 
  48. +
  49. ^ Claburn, Thomas (11 November 2009). "Google 'Go' Name Brings Accusations Of Evil'". InformationWeek. http://www.informationweek.com/news/software/web_services/showArticle.jhtml?articleID=221601351. Retrieved 18 January 2010. 
  50. +
  51. ^ "Issue 9 - go - I have already used the name for *MY* programming language". Google Code. Google Inc.. http://code.google.com/p/go/issues/detail?id=9. Retrieved 12 October 2010. 
  52. +
+
+

[edit] Further reading

+ +

[edit] External links

+ + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + +
+ +
+ + +
+ + +
+
Personal tools
+ +
+ + +
+ + +
+
Namespaces
+ +
+ + + + +
+

+

+
Variants
+ +
+ + +
+
+ + +
+
Views
+ +
+ + + + +
+
Actions
+ +
+ + + + + + + +
+
+ + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/metalreview.html b/vendor/github.com/PuerkitoBio/goquery/testdata/metalreview.html new file mode 100644 index 00000000..fc4a38fc --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/metalreview.html @@ -0,0 +1,413 @@ + + + + + + + + + + + + + + Metal Reviews, News, Blogs, Interviews and Community | Metal Review + + + + + + + + + + +
+
+ + + +
+ + + + +
+
+ + + + +
+
+
+ +
+
+
+
+ + + + + +
+

Reviews

+
+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
    + +
  • +

    + Serpentine Path - Serpentine Path + Serpentine Path
    + Serpentine Path
    + +

    +
  • + +
  • +

    + Hunter + Hunter's Ground
    + No God But the Wild
    + +

    +
  • + +
  • +

    + Blut Aus Nord - 777 - Cosmosophy + Blut Aus Nord
    + 777 - Cosmosophy
    + Black +

    +
  • + +
  • +

    + Ufomammut - Oro: Opus Alter + Ufomammut
    + Oro: Opus Alter
    + Doom +

    +
  • + +
  • +

    + Resurgency - False Enlightenment + Resurgency
    + False Enlightenment
    + Death +

    +
  • + +
  • +

    + Morgoth - Cursed to Live + Morgoth
    + Cursed to Live
    + DeathLive +

    +
  • + +
  • +

    + Krallice - Years Past Matter + Krallice
    + Years Past Matter
    + Black +

    +
  • + +
  • +

    + Murder Construct - Results + Murder Construct
    + Results
    + Grindcore +

    +
  • + +
  • +

    + Grave - Endless Procession of Souls + Grave
    + Endless Procession of Souls
    + Death +

    +
  • + +
  • +

    + Master - The New Elite + Master
    + The New Elite
    + Death +

    +
  • + +
+
+
+
+
+
+
+ Serpentine Path - Serpentine PathHunter's Ground - No God But the WildBlut Aus Nord - 777 - CosmosophyUfomammut - Oro: Opus AlterResurgency - False EnlightenmentMorgoth - Cursed to LiveKrallice - Years Past MatterMurder Construct - ResultsGrave - Endless Procession of SoulsMaster - The New Elite +
+
+
+
+ + + + + + + +
+
+
+
+
+ + + + +
Goto Next Group
+
Goto Previous Group
+ + + + + +
+ +
+ + + + + + + +
+

Lashes

+
NEW Katatonia - Dead End Kings
45 minutes ago by Chaosjunkie
+
Katatonia - Dead End Kings
1 hour ago by Harry Dick Rotten
+
Resurgency - False Enlightenment
3 hours ago by Anonymous
+
Witchcraft - The Alchemist
5 hours ago by Luke_22
+
Katatonia - Dead End Kings
9 hours ago by chaosjunkie
+
Katatonia - Dead End Kings
10 hours ago by Compeller
+
Manetheren - Time
10 hours ago by xpmule
+
Ufomammut - Oro: Opus Alter
16 hours ago by Anonymous
+
Ufomammut - Oro: Opus Alter
17 hours ago by Harry Dick Rotten
+
Katatonia - Dead End Kings
yesterday by Chaosjunkie
+
Katatonia - Dead End Kings
yesterday by Anonymous
+
Katatonia - Dead End Kings
yesterday by Anonymous
+
Katatonia - Dead End Kings
yesterday by Anonymous
+
Katatonia - Dead End Kings
yesterday by frantic
+
Blut Aus Nord - 777 - Cosmosophy
yesterday by Dimensional Bleedthrough
+ +
+ +
+
+
+ + + +
+
+ + + + + + + + + + + diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/page.html b/vendor/github.com/PuerkitoBio/goquery/testdata/page.html new file mode 100644 index 00000000..92ec74ee --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/page.html @@ -0,0 +1,102 @@ + + + + + + + Provok.in + + + + + + + + + +
+
+
+   +
+
+
+
+
+
+

+ Provok.in +

+

+ Prove your point. +

+
+
+
+
+ Beta Version. Things may change. Or disappear. Or fail miserably. If it's the latter, please file an issue. +
+
+ +
+ Welcome, {{getUserName()}} ( logout ) +
+
+
+
+
+   +
+
+
+
+   +
+
+
+
+
+
+ × +

+ {{ title }} +

+

+ {{ message }} +

+
+
+
+
+
+
+
+
+
+
+   +
+
+
+
+   +
+
+ +
+
+   +
+
+
+ + \ No newline at end of file diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html b/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html new file mode 100644 index 00000000..4c2f92f4 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html @@ -0,0 +1,24 @@ + + + + Tests for siblings + + +
+
+
+
+
+
+
+
+ + + diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html b/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html new file mode 100644 index 00000000..17e86241 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html @@ -0,0 +1,24 @@ + + + + Tests for siblings + + +
+
hello
+
+
+
+
+
+
+ + + diff --git a/vendor/github.com/PuerkitoBio/goquery/traversal.go b/vendor/github.com/PuerkitoBio/goquery/traversal.go new file mode 100644 index 00000000..5fa5315a --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/traversal.go @@ -0,0 +1,698 @@ +package goquery + +import "golang.org/x/net/html" + +type siblingType int + +// Sibling type, used internally when iterating over children at the same +// level (siblings) to specify which nodes are requested. +const ( + siblingPrevUntil siblingType = iota - 3 + siblingPrevAll + siblingPrev + siblingAll + siblingNext + siblingNextAll + siblingNextUntil + siblingAllIncludingNonElements +) + +// Find gets the descendants of each element in the current set of matched +// elements, filtered by a selector. It returns a new Selection object +// containing these matched elements. +func (s *Selection) Find(selector string) *Selection { + return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector))) +} + +// FindMatcher gets the descendants of each element in the current set of matched +// elements, filtered by the matcher. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindMatcher(m Matcher) *Selection { + return pushStack(s, findWithMatcher(s.Nodes, m)) +} + +// FindSelection gets the descendants of each element in the current +// Selection, filtered by a Selection. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, nil) + } + return s.FindNodes(sel.Nodes...) +} + +// FindNodes gets the descendants of each element in the current +// Selection, filtered by some nodes. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindNodes(nodes ...*html.Node) *Selection { + return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + if sliceContains(s.Nodes, n) { + return []*html.Node{n} + } + return nil + })) +} + +// Contents gets the children of each element in the Selection, +// including text and comment nodes. It returns a new Selection object +// containing these elements. +func (s *Selection) Contents() *Selection { + return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements)) +} + +// ContentsFiltered gets the children of each element in the Selection, +// filtered by the specified selector. It returns a new Selection +// object containing these elements. Since selectors only act on Element nodes, +// this function is an alias to ChildrenFiltered unless the selector is empty, +// in which case it is an alias to Contents. +func (s *Selection) ContentsFiltered(selector string) *Selection { + if selector != "" { + return s.ChildrenFiltered(selector) + } + return s.Contents() +} + +// ContentsMatcher gets the children of each element in the Selection, +// filtered by the specified matcher. It returns a new Selection +// object containing these elements. Since matchers only act on Element nodes, +// this function is an alias to ChildrenMatcher. +func (s *Selection) ContentsMatcher(m Matcher) *Selection { + return s.ChildrenMatcher(m) +} + +// Children gets the child elements of each element in the Selection. +// It returns a new Selection object containing these elements. 
+func (s *Selection) Children() *Selection { + return pushStack(s, getChildrenNodes(s.Nodes, siblingAll)) +} + +// ChildrenFiltered gets the child elements of each element in the Selection, +// filtered by the specified selector. It returns a new +// Selection object containing these elements. +func (s *Selection) ChildrenFiltered(selector string) *Selection { + return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector)) +} + +// ChildrenMatcher gets the child elements of each element in the Selection, +// filtered by the specified matcher. It returns a new +// Selection object containing these elements. +func (s *Selection) ChildrenMatcher(m Matcher) *Selection { + return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m) +} + +// Parent gets the parent of each element in the Selection. It returns a +// new Selection object containing the matched elements. +func (s *Selection) Parent() *Selection { + return pushStack(s, getParentNodes(s.Nodes)) +} + +// ParentFiltered gets the parent of each element in the Selection filtered by a +// selector. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentFiltered(selector string) *Selection { + return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector)) +} + +// ParentMatcher gets the parent of each element in the Selection filtered by a +// matcher. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentMatcher(m Matcher) *Selection { + return filterAndPush(s, getParentNodes(s.Nodes), m) +} + +// Closest gets the first element that matches the selector by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) Closest(selector string) *Selection { + cs := compileMatcher(selector) + return s.ClosestMatcher(cs) +} + +// ClosestMatcher gets the first element that matches the matcher by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) ClosestMatcher(m Matcher) *Selection { + return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node { + // For each node in the selection, test the node itself, then each parent + // until a match is found. + for ; n != nil; n = n.Parent { + if m.Match(n) { + return []*html.Node{n} + } + } + return nil + })) +} + +// ClosestNodes gets the first element that matches one of the nodes by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection { + set := make(map[*html.Node]bool) + for _, n := range nodes { + set[n] = true + } + return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node { + // For each node in the selection, test the node itself, then each parent + // until a match is found. + for ; n != nil; n = n.Parent { + if set[n] { + return []*html.Node{n} + } + } + return nil + })) +} + +// ClosestSelection gets the first element that matches one of the nodes in the +// Selection by testing the element itself and traversing up through its ancestors +// in the DOM tree. +func (s *Selection) ClosestSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, nil) + } + return s.ClosestNodes(sel.Nodes...) +} + +// Parents gets the ancestors of each element in the current Selection. It +// returns a new Selection object with the matched elements. 
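+
+// exampleTraversalUsage is an illustrative sketch (not part of the upstream
+// goquery sources): it shows how the traversal helpers defined in this file
+// are typically chained. The selectors and variable names are invented.
+func exampleTraversalUsage(doc *Document) {
+    items := doc.Find("ul.menu > li")     // descendants matching a selector
+    links := items.Children()             // direct children of each <li>
+    menu := links.First().Closest("ul")   // nearest <ul> ancestor of the first child
+    ancestors := items.Parent().Parents() // parent, then all remaining ancestors
+    _, _, _ = links, menu, ancestors
+}
+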
+func (s *Selection) Parents() *Selection { + return pushStack(s, getParentsNodes(s.Nodes, nil, nil)) +} + +// ParentsFiltered gets the ancestors of each element in the current +// Selection. It returns a new Selection object with the matched elements. +func (s *Selection) ParentsFiltered(selector string) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector)) +} + +// ParentsMatcher gets the ancestors of each element in the current +// Selection. It returns a new Selection object with the matched elements. +func (s *Selection) ParentsMatcher(m Matcher) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m) +} + +// ParentsUntil gets the ancestors of each element in the Selection, up to but +// not including the element matched by the selector. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsUntil(selector string) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil)) +} + +// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but +// not including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, m, nil)) +} + +// ParentsUntilSelection gets the ancestors of each element in the Selection, +// up to but not including the elements in the specified Selection. It returns a +// new Selection object containing the matched elements. +func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.Parents() + } + return s.ParentsUntilNodes(sel.Nodes...) +} + +// ParentsUntilNodes gets the ancestors of each element in the Selection, +// up to but not including the specified nodes. It returns a +// new Selection object containing the matched elements. +func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, nil, nodes)) +} + +// ParentsFilteredUntil is like ParentsUntil, with the option to filter the +// results based on a selector string. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the +// results based on a matcher. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter) +} + +// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. 
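A rough sketch of the ancestor-traversal methods (Closest, Parents, ParentsUntil); the markup, ids and class names are assumed for illustration only.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	const page = `<section class="wrap"><article><p><span id="x">hi</span></p></article></section>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}

	span := doc.Find("#x")

	// Closest tests the element itself first, then walks up through its ancestors.
	fmt.Println(span.Closest("span").Length())    // 1 (matches itself)
	fmt.Println(span.Closest("article").Length()) // 1

	// Parents returns every ancestor; ParentsUntil stops before the first match.
	fmt.Println(span.Parents().Length())             // 5: p, article, section, body, html
	fmt.Println(span.ParentsUntil(".wrap").Length()) // 2: p, article
}
```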
+func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.ParentsMatcher(filter) + } + return s.ParentsMatcherUntilNodes(filter, sel.Nodes...) +} + +// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector)) +} + +// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter) +} + +// Siblings gets the siblings of each element in the Selection. It returns +// a new Selection object containing the matched elements. +func (s *Selection) Siblings() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil)) +} + +// SiblingsFiltered gets the siblings of each element in the Selection +// filtered by a selector. It returns a new Selection object containing the +// matched elements. +func (s *Selection) SiblingsFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector)) +} + +// SiblingsMatcher gets the siblings of each element in the Selection +// filtered by a matcher. It returns a new Selection object containing the +// matched elements. +func (s *Selection) SiblingsMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m) +} + +// Next gets the immediately following sibling of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) Next() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil)) +} + +// NextFiltered gets the immediately following sibling of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector)) +} + +// NextMatcher gets the immediately following sibling of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m) +} + +// NextAll gets all the following siblings of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) NextAll() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil)) +} + +// NextAllFiltered gets all the following siblings of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. 
+func (s *Selection) NextAllFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector)) +} + +// NextAllMatcher gets all the following siblings of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextAllMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m) +} + +// Prev gets the immediately preceding sibling of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) Prev() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil)) +} + +// PrevFiltered gets the immediately preceding sibling of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector)) +} + +// PrevMatcher gets the immediately preceding sibling of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m) +} + +// PrevAll gets all the preceding siblings of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) PrevAll() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil)) +} + +// PrevAllFiltered gets all the preceding siblings of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevAllFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector)) +} + +// PrevAllMatcher gets all the preceding siblings of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevAllMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m) +} + +// NextUntil gets all following siblings of each element up to but not +// including the element matched by the selector. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntil(selector string) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + compileMatcher(selector), nil)) +} + +// NextUntilMatcher gets all following siblings of each element up to but not +// including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilMatcher(m Matcher) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + m, nil)) +} + +// NextUntilSelection gets all following siblings of each element up to but not +// including the element matched by the Selection. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.NextAll() + } + return s.NextUntilNodes(sel.Nodes...) 
+} + +// NextUntilNodes gets all following siblings of each element up to but not +// including the element matched by the nodes. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes)) +} + +// PrevUntil gets all preceding siblings of each element up to but not +// including the element matched by the selector. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntil(selector string) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + compileMatcher(selector), nil)) +} + +// PrevUntilMatcher gets all preceding siblings of each element up to but not +// including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilMatcher(m Matcher) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + m, nil)) +} + +// PrevUntilSelection gets all preceding siblings of each element up to but not +// including the element matched by the Selection. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.PrevAll() + } + return s.PrevUntilNodes(sel.Nodes...) +} + +// PrevUntilNodes gets all preceding siblings of each element up to but not +// including the element matched by the nodes. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes)) +} + +// NextFilteredUntil is like NextUntil, with the option to filter +// the results based on a selector string. +// It returns a new Selection object containing the matched elements. +func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter +// the results based on a matcher. +// It returns a new Selection object containing the matched elements. +func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + until, nil), filter) +} + +// NextFilteredUntilSelection is like NextUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// NextMatcherUntilSelection is like NextUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.NextMatcher(filter) + } + return s.NextMatcherUntilNodes(filter, sel.Nodes...) +} + +// NextFilteredUntilNodes is like NextUntilNodes, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. 
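A sketch of the sibling-traversal family (Next, NextAll, NextUntil, PrevUntil); the list markup and ids are invented for the example.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	const page = `<ul><li id="a">a</li><li id="b">b</li><li id="c">c</li><li id="d" class="stop">d</li><li id="e">e</li></ul>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}

	a := doc.Find("#a")
	fmt.Println(a.Next().Length())             // 1: #b only
	fmt.Println(a.NextAll().Length())          // 4: #b through #e
	fmt.Println(a.NextUntil(".stop").Length()) // 2: #b and #c, stops before #d

	e := doc.Find("#e")
	fmt.Println(e.PrevUntil("#b").Length()) // 2: #d and #c, stops before #b
}
```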
+func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes), compileMatcher(filterSelector)) +} + +// NextMatcherUntilNodes is like NextUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes), filter) +} + +// PrevFilteredUntil is like PrevUntil, with the option to filter +// the results based on a selector string. +// It returns a new Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter +// the results based on a matcher. +// It returns a new Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + until, nil), filter) +} + +// PrevFilteredUntilSelection is like PrevUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// PrevMatcherUntilSelection is like PrevUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.PrevMatcher(filter) + } + return s.PrevMatcherUntilNodes(filter, sel.Nodes...) +} + +// PrevFilteredUntilNodes is like PrevUntilNodes, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes), compileMatcher(filterSelector)) +} + +// PrevMatcherUntilNodes is like PrevUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes), filter) +} + +// Filter and push filters the nodes based on a matcher, and pushes the results +// on the stack, with the srcSel as previous selection. +func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection { + // Create a temporary Selection with the specified nodes to filter using winnow + sel := &Selection{nodes, srcSel.document, nil} + // Filter based on matcher and push on stack + return pushStack(srcSel, winnow(sel, m, true)) +} + +// Internal implementation of Find that return raw nodes. 
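An illustrative sketch of the Matcher-based variants: a cascadia.Selector satisfies the Matcher interface declared in this package's type.go, so a selector can be compiled once and reused, whereas the string-based *Filtered variants recompile on every call. The markup and counts here are assumptions.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
	"github.com/andybalholm/cascadia"
)

func main() {
	const page = `<div><p class="odd">1</p><p class="even">2</p><p class="odd">3</p><p class="even">4</p></div>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}

	// Compile the selector once and hand it to the *Matcher methods.
	odd := cascadia.MustCompile(".odd")

	first := doc.Find("p").First() // <p class="odd">1</p>
	fmt.Println(first.NextAllMatcher(odd).Length()) // 1: only <p class="odd">3</p>
	fmt.Println(doc.FindMatcher(odd).Length())      // 2
}
```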
+func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node { + // Map nodes to find the matches within the children of each node + return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) { + // Go down one level, becausejQuery's Find selects only within descendants + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.ElementNode { + result = append(result, m.MatchAll(c)...) + } + } + return + }) +} + +// Internal implementation to get all parent nodes, stopping at the specified +// node (or nil if no stop). +func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) { + for p := n.Parent; p != nil; p = p.Parent { + sel := newSingleSelection(p, nil) + if stopm != nil { + if sel.IsMatcher(stopm) { + break + } + } else if len(stopNodes) > 0 { + if sel.IsNodes(stopNodes...) { + break + } + } + if p.Type == html.ElementNode { + result = append(result, p) + } + } + return + }) +} + +// Internal implementation of sibling nodes that return a raw slice of matches. +func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node { + var f func(*html.Node) bool + + // If the requested siblings are ...Until, create the test function to + // determine if the until condition is reached (returns true if it is) + if st == siblingNextUntil || st == siblingPrevUntil { + f = func(n *html.Node) bool { + if untilm != nil { + // Matcher-based condition + sel := newSingleSelection(n, nil) + return sel.IsMatcher(untilm) + } else if len(untilNodes) > 0 { + // Nodes-based condition + sel := newSingleSelection(n, nil) + return sel.IsNodes(untilNodes...) + } + return false + } + } + + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + return getChildrenWithSiblingType(n.Parent, st, n, f) + }) +} + +// Gets the children nodes of each node in the specified slice of nodes, +// based on the sibling type request. +func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + return getChildrenWithSiblingType(n, st, nil, nil) + }) +} + +// Gets the children of the specified parent, based on the requested sibling +// type, skipping a specified node if required. 
+func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node, + untilFunc func(*html.Node) bool) (result []*html.Node) { + + // Create the iterator function + var iter = func(cur *html.Node) (ret *html.Node) { + // Based on the sibling type requested, iterate the right way + for { + switch st { + case siblingAll, siblingAllIncludingNonElements: + if cur == nil { + // First iteration, start with first child of parent + // Skip node if required + if ret = parent.FirstChild; ret == skipNode && skipNode != nil { + ret = skipNode.NextSibling + } + } else { + // Skip node if required + if ret = cur.NextSibling; ret == skipNode && skipNode != nil { + ret = skipNode.NextSibling + } + } + case siblingPrev, siblingPrevAll, siblingPrevUntil: + if cur == nil { + // Start with previous sibling of the skip node + ret = skipNode.PrevSibling + } else { + ret = cur.PrevSibling + } + case siblingNext, siblingNextAll, siblingNextUntil: + if cur == nil { + // Start with next sibling of the skip node + ret = skipNode.NextSibling + } else { + ret = cur.NextSibling + } + default: + panic("Invalid sibling type.") + } + if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements { + return + } + // Not a valid node, try again from this one + cur = ret + } + } + + for c := iter(nil); c != nil; c = iter(c) { + // If this is an ...Until case, test before append (returns true + // if the until condition is reached) + if st == siblingNextUntil || st == siblingPrevUntil { + if untilFunc(c) { + return + } + } + result = append(result, c) + if st == siblingNext || st == siblingPrev { + // Only one node was requested (immediate next or previous), so exit + return + } + } + return +} + +// Internal implementation of parent nodes that return a raw slice of Nodes. +func getParentNodes(nodes []*html.Node) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + if n.Parent != nil && n.Parent.Type == html.ElementNode { + return []*html.Node{n.Parent} + } + return nil + }) +} + +// Internal map function used by many traversing methods. Takes the source nodes +// to iterate on and the mapping function that returns an array of nodes. +// Returns an array of nodes mapped by calling the callback function once for +// each node in the source nodes. 
+func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) { + set := make(map[*html.Node]bool) + for i, n := range nodes { + if vals := f(i, n); len(vals) > 0 { + result = appendWithoutDuplicates(result, vals, set) + } + } + return result +} diff --git a/vendor/github.com/PuerkitoBio/goquery/traversal_test.go b/vendor/github.com/PuerkitoBio/goquery/traversal_test.go new file mode 100644 index 00000000..04383a41 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/traversal_test.go @@ -0,0 +1,793 @@ +package goquery + +import ( + "strings" + "testing" +) + +func TestFind(t *testing.T) { + sel := Doc().Find("div.row-fluid") + assertLength(t, sel.Nodes, 9) +} + +func TestFindRollback(t *testing.T) { + sel := Doc().Find("div.row-fluid") + sel2 := sel.Find("a").End() + assertEqual(t, sel, sel2) +} + +func TestFindNotSelf(t *testing.T) { + sel := Doc().Find("h1").Find("h1") + assertLength(t, sel.Nodes, 0) +} + +func TestFindInvalid(t *testing.T) { + sel := Doc().Find(":+ ^") + assertLength(t, sel.Nodes, 0) +} + +func TestFindBig(t *testing.T) { + doc := DocW() + sel := doc.Find("li") + assertLength(t, sel.Nodes, 373) + sel2 := doc.Find("span") + assertLength(t, sel2.Nodes, 448) + sel3 := sel.FindSelection(sel2) + assertLength(t, sel3.Nodes, 248) +} + +func TestChainedFind(t *testing.T) { + sel := Doc().Find("div.hero-unit").Find(".row-fluid") + assertLength(t, sel.Nodes, 4) +} + +func TestChainedFindInvalid(t *testing.T) { + sel := Doc().Find("div.hero-unit").Find("") + assertLength(t, sel.Nodes, 0) +} + +func TestChildren(t *testing.T) { + sel := Doc().Find(".pvk-content").Children() + assertLength(t, sel.Nodes, 5) +} + +func TestChildrenRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Children().End() + assertEqual(t, sel, sel2) +} + +func TestContents(t *testing.T) { + sel := Doc().Find(".pvk-content").Contents() + assertLength(t, sel.Nodes, 13) +} + +func TestContentsRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.Contents().End() + assertEqual(t, sel, sel2) +} + +func TestChildrenFiltered(t *testing.T) { + sel := Doc().Find(".pvk-content").ChildrenFiltered(".hero-unit") + assertLength(t, sel.Nodes, 1) +} + +func TestChildrenFilteredInvalid(t *testing.T) { + sel := Doc().Find(".pvk-content").ChildrenFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestChildrenFilteredRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.ChildrenFiltered(".hero-unit").End() + assertEqual(t, sel, sel2) +} + +func TestContentsFiltered(t *testing.T) { + sel := Doc().Find(".pvk-content").ContentsFiltered(".hero-unit") + assertLength(t, sel.Nodes, 1) +} + +func TestContentsFilteredInvalid(t *testing.T) { + sel := Doc().Find(".pvk-content").ContentsFiltered("~") + assertLength(t, sel.Nodes, 0) +} + +func TestContentsFilteredRollback(t *testing.T) { + sel := Doc().Find(".pvk-content") + sel2 := sel.ContentsFiltered(".hero-unit").End() + assertEqual(t, sel, sel2) +} + +func TestChildrenFilteredNone(t *testing.T) { + sel := Doc().Find(".pvk-content").ChildrenFiltered("a.btn") + assertLength(t, sel.Nodes, 0) +} + +func TestParent(t *testing.T) { + sel := Doc().Find(".container-fluid").Parent() + assertLength(t, sel.Nodes, 3) +} + +func TestParentRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.Parent().End() + assertEqual(t, sel, sel2) +} + +func TestParentBody(t *testing.T) { + sel := Doc().Find("body").Parent() + assertLength(t, sel.Nodes, 1) +} + +func 
TestParentFiltered(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentFiltered(".hero-unit") + assertLength(t, sel.Nodes, 1) + assertClass(t, sel, "hero-unit") +} + +func TestParentFilteredInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestParentFilteredRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ParentFiltered(".hero-unit").End() + assertEqual(t, sel, sel2) +} + +func TestParents(t *testing.T) { + sel := Doc().Find(".container-fluid").Parents() + assertLength(t, sel.Nodes, 8) +} + +func TestParentsOrder(t *testing.T) { + sel := Doc().Find("#cf2").Parents() + assertLength(t, sel.Nodes, 6) + assertSelectionIs(t, sel, ".hero-unit", ".pvk-content", "div.row-fluid", "#cf1", "body", "html") +} + +func TestParentsRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.Parents().End() + assertEqual(t, sel, sel2) +} + +func TestParentsFiltered(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsFiltered("body") + assertLength(t, sel.Nodes, 1) +} + +func TestParentsFilteredInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestParentsFilteredRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ParentsFiltered("body").End() + assertEqual(t, sel, sel2) +} + +func TestParentsUntil(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsUntil("body") + assertLength(t, sel.Nodes, 6) +} + +func TestParentsUntilInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsUntil("") + assertLength(t, sel.Nodes, 8) +} + +func TestParentsUntilRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ParentsUntil("body").End() + assertEqual(t, sel, sel2) +} + +func TestParentsUntilSelection(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".pvk-content") + sel = sel.ParentsUntilSelection(sel2) + assertLength(t, sel.Nodes, 3) +} + +func TestParentsUntilSelectionRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".pvk-content") + sel2 = sel.ParentsUntilSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestParentsUntilNodes(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".pvk-content, .hero-unit") + sel = sel.ParentsUntilNodes(sel2.Nodes...) 
+ assertLength(t, sel.Nodes, 2) +} + +func TestParentsUntilNodesRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".pvk-content, .hero-unit") + sel2 = sel.ParentsUntilNodes(sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestParentsFilteredUntil(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsFilteredUntil(".pvk-content", "body") + assertLength(t, sel.Nodes, 2) +} + +func TestParentsFilteredUntilInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").ParentsFilteredUntil("", "") + assertLength(t, sel.Nodes, 0) +} + +func TestParentsFilteredUntilRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ParentsFilteredUntil(".pvk-content", "body").End() + assertEqual(t, sel, sel2) +} + +func TestParentsFilteredUntilSelection(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".row-fluid") + sel = sel.ParentsFilteredUntilSelection("div", sel2) + assertLength(t, sel.Nodes, 3) +} + +func TestParentsFilteredUntilSelectionRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".row-fluid") + sel2 = sel.ParentsFilteredUntilSelection("div", sel2).End() + assertEqual(t, sel, sel2) +} + +func TestParentsFilteredUntilNodes(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".row-fluid") + sel = sel.ParentsFilteredUntilNodes("body", sel2.Nodes...) + assertLength(t, sel.Nodes, 1) +} + +func TestParentsFilteredUntilNodesRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := Doc().Find(".row-fluid") + sel2 = sel.ParentsFilteredUntilNodes("body", sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestSiblings(t *testing.T) { + sel := Doc().Find("h1").Siblings() + assertLength(t, sel.Nodes, 1) +} + +func TestSiblingsRollback(t *testing.T) { + sel := Doc().Find("h1") + sel2 := sel.Siblings().End() + assertEqual(t, sel, sel2) +} + +func TestSiblings2(t *testing.T) { + sel := Doc().Find(".pvk-gutter").Siblings() + assertLength(t, sel.Nodes, 9) +} + +func TestSiblings3(t *testing.T) { + sel := Doc().Find("body>.container-fluid").Siblings() + assertLength(t, sel.Nodes, 0) +} + +func TestSiblingsFiltered(t *testing.T) { + sel := Doc().Find(".pvk-gutter").SiblingsFiltered(".pvk-content") + assertLength(t, sel.Nodes, 3) +} + +func TestSiblingsFilteredInvalid(t *testing.T) { + sel := Doc().Find(".pvk-gutter").SiblingsFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestSiblingsFilteredRollback(t *testing.T) { + sel := Doc().Find(".pvk-gutter") + sel2 := sel.SiblingsFiltered(".pvk-content").End() + assertEqual(t, sel, sel2) +} + +func TestNext(t *testing.T) { + sel := Doc().Find("h1").Next() + assertLength(t, sel.Nodes, 1) +} + +func TestNextRollback(t *testing.T) { + sel := Doc().Find("h1") + sel2 := sel.Next().End() + assertEqual(t, sel, sel2) +} + +func TestNext2(t *testing.T) { + sel := Doc().Find(".close").Next() + assertLength(t, sel.Nodes, 1) +} + +func TestNextNone(t *testing.T) { + sel := Doc().Find("small").Next() + assertLength(t, sel.Nodes, 0) +} + +func TestNextFiltered(t *testing.T) { + sel := Doc().Find(".container-fluid").NextFiltered("div") + assertLength(t, sel.Nodes, 2) +} + +func TestNextFilteredInvalid(t *testing.T) { + sel := Doc().Find(".container-fluid").NextFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestNextFilteredRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.NextFiltered("div").End() + assertEqual(t, sel, sel2) +} + +func 
TestNextFiltered2(t *testing.T) { + sel := Doc().Find(".container-fluid").NextFiltered("[ng-view]") + assertLength(t, sel.Nodes, 1) +} + +func TestPrev(t *testing.T) { + sel := Doc().Find(".red").Prev() + assertLength(t, sel.Nodes, 1) + assertClass(t, sel, "green") +} + +func TestPrevRollback(t *testing.T) { + sel := Doc().Find(".red") + sel2 := sel.Prev().End() + assertEqual(t, sel, sel2) +} + +func TestPrev2(t *testing.T) { + sel := Doc().Find(".row-fluid").Prev() + assertLength(t, sel.Nodes, 5) +} + +func TestPrevNone(t *testing.T) { + sel := Doc().Find("h2").Prev() + assertLength(t, sel.Nodes, 0) +} + +func TestPrevFiltered(t *testing.T) { + sel := Doc().Find(".row-fluid").PrevFiltered(".row-fluid") + assertLength(t, sel.Nodes, 5) +} + +func TestPrevFilteredInvalid(t *testing.T) { + sel := Doc().Find(".row-fluid").PrevFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestPrevFilteredRollback(t *testing.T) { + sel := Doc().Find(".row-fluid") + sel2 := sel.PrevFiltered(".row-fluid").End() + assertEqual(t, sel, sel2) +} + +func TestNextAll(t *testing.T) { + sel := Doc().Find("#cf2 div:nth-child(1)").NextAll() + assertLength(t, sel.Nodes, 3) +} + +func TestNextAllRollback(t *testing.T) { + sel := Doc().Find("#cf2 div:nth-child(1)") + sel2 := sel.NextAll().End() + assertEqual(t, sel, sel2) +} + +func TestNextAll2(t *testing.T) { + sel := Doc().Find("div[ng-cloak]").NextAll() + assertLength(t, sel.Nodes, 1) +} + +func TestNextAllNone(t *testing.T) { + sel := Doc().Find(".footer").NextAll() + assertLength(t, sel.Nodes, 0) +} + +func TestNextAllFiltered(t *testing.T) { + sel := Doc().Find("#cf2 .row-fluid").NextAllFiltered("[ng-cloak]") + assertLength(t, sel.Nodes, 2) +} + +func TestNextAllFilteredInvalid(t *testing.T) { + sel := Doc().Find("#cf2 .row-fluid").NextAllFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestNextAllFilteredRollback(t *testing.T) { + sel := Doc().Find("#cf2 .row-fluid") + sel2 := sel.NextAllFiltered("[ng-cloak]").End() + assertEqual(t, sel, sel2) +} + +func TestNextAllFiltered2(t *testing.T) { + sel := Doc().Find(".close").NextAllFiltered("h4") + assertLength(t, sel.Nodes, 1) +} + +func TestPrevAll(t *testing.T) { + sel := Doc().Find("[ng-view]").PrevAll() + assertLength(t, sel.Nodes, 2) +} + +func TestPrevAllOrder(t *testing.T) { + sel := Doc().Find("[ng-view]").PrevAll() + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#cf4", "#cf3") +} + +func TestPrevAllRollback(t *testing.T) { + sel := Doc().Find("[ng-view]") + sel2 := sel.PrevAll().End() + assertEqual(t, sel, sel2) +} + +func TestPrevAll2(t *testing.T) { + sel := Doc().Find(".pvk-gutter").PrevAll() + assertLength(t, sel.Nodes, 6) +} + +func TestPrevAllFiltered(t *testing.T) { + sel := Doc().Find(".pvk-gutter").PrevAllFiltered(".pvk-content") + assertLength(t, sel.Nodes, 3) +} + +func TestPrevAllFilteredInvalid(t *testing.T) { + sel := Doc().Find(".pvk-gutter").PrevAllFiltered("") + assertLength(t, sel.Nodes, 0) +} + +func TestPrevAllFilteredRollback(t *testing.T) { + sel := Doc().Find(".pvk-gutter") + sel2 := sel.PrevAllFiltered(".pvk-content").End() + assertEqual(t, sel, sel2) +} + +func TestNextUntil(t *testing.T) { + sel := Doc().Find(".alert a").NextUntil("p") + assertLength(t, sel.Nodes, 1) + assertSelectionIs(t, sel, "h4") +} + +func TestNextUntilInvalid(t *testing.T) { + sel := Doc().Find(".alert a").NextUntil("") + assertLength(t, sel.Nodes, 2) +} + +func TestNextUntil2(t *testing.T) { + sel := Doc().Find("#cf2-1").NextUntil("[ng-cloak]") + assertLength(t, sel.Nodes, 1) + 
assertSelectionIs(t, sel, "#cf2-2") +} + +func TestNextUntilOrder(t *testing.T) { + sel := Doc().Find("#cf2-1").NextUntil("#cf2-4") + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#cf2-2", "#cf2-3") +} + +func TestNextUntilRollback(t *testing.T) { + sel := Doc().Find("#cf2-1") + sel2 := sel.PrevUntil("#cf2-4").End() + assertEqual(t, sel, sel2) +} + +func TestNextUntilSelection(t *testing.T) { + sel := Doc2().Find("#n2") + sel2 := Doc2().Find("#n4") + sel2 = sel.NextUntilSelection(sel2) + assertLength(t, sel2.Nodes, 1) + assertSelectionIs(t, sel2, "#n3") +} + +func TestNextUntilSelectionRollback(t *testing.T) { + sel := Doc2().Find("#n2") + sel2 := Doc2().Find("#n4") + sel2 = sel.NextUntilSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestNextUntilNodes(t *testing.T) { + sel := Doc2().Find("#n2") + sel2 := Doc2().Find("#n5") + sel2 = sel.NextUntilNodes(sel2.Nodes...) + assertLength(t, sel2.Nodes, 2) + assertSelectionIs(t, sel2, "#n3", "#n4") +} + +func TestNextUntilNodesRollback(t *testing.T) { + sel := Doc2().Find("#n2") + sel2 := Doc2().Find("#n5") + sel2 = sel.NextUntilNodes(sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestPrevUntil(t *testing.T) { + sel := Doc().Find(".alert p").PrevUntil("a") + assertLength(t, sel.Nodes, 1) + assertSelectionIs(t, sel, "h4") +} + +func TestPrevUntilInvalid(t *testing.T) { + sel := Doc().Find(".alert p").PrevUntil("") + assertLength(t, sel.Nodes, 2) +} + +func TestPrevUntil2(t *testing.T) { + sel := Doc().Find("[ng-cloak]").PrevUntil(":not([ng-cloak])") + assertLength(t, sel.Nodes, 1) + assertSelectionIs(t, sel, "[ng-cloak]") +} + +func TestPrevUntilOrder(t *testing.T) { + sel := Doc().Find("#cf2-4").PrevUntil("#cf2-1") + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#cf2-3", "#cf2-2") +} + +func TestPrevUntilRollback(t *testing.T) { + sel := Doc().Find("#cf2-4") + sel2 := sel.PrevUntil("#cf2-1").End() + assertEqual(t, sel, sel2) +} + +func TestPrevUntilSelection(t *testing.T) { + sel := Doc2().Find("#n4") + sel2 := Doc2().Find("#n2") + sel2 = sel.PrevUntilSelection(sel2) + assertLength(t, sel2.Nodes, 1) + assertSelectionIs(t, sel2, "#n3") +} + +func TestPrevUntilSelectionRollback(t *testing.T) { + sel := Doc2().Find("#n4") + sel2 := Doc2().Find("#n2") + sel2 = sel.PrevUntilSelection(sel2).End() + assertEqual(t, sel, sel2) +} + +func TestPrevUntilNodes(t *testing.T) { + sel := Doc2().Find("#n5") + sel2 := Doc2().Find("#n2") + sel2 = sel.PrevUntilNodes(sel2.Nodes...) 
+ assertLength(t, sel2.Nodes, 2) + assertSelectionIs(t, sel2, "#n4", "#n3") +} + +func TestPrevUntilNodesRollback(t *testing.T) { + sel := Doc2().Find("#n5") + sel2 := Doc2().Find("#n2") + sel2 = sel.PrevUntilNodes(sel2.Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestNextFilteredUntil(t *testing.T) { + sel := Doc2().Find(".two").NextFilteredUntil(".even", ".six") + assertLength(t, sel.Nodes, 4) + assertSelectionIs(t, sel, "#n3", "#n5", "#nf3", "#nf5") +} + +func TestNextFilteredUntilInvalid(t *testing.T) { + sel := Doc2().Find(".two").NextFilteredUntil("", "") + assertLength(t, sel.Nodes, 0) +} + +func TestNextFilteredUntilRollback(t *testing.T) { + sel := Doc2().Find(".two") + sel2 := sel.NextFilteredUntil(".even", ".six").End() + assertEqual(t, sel, sel2) +} + +func TestNextFilteredUntilSelection(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".five") + sel = sel.NextFilteredUntilSelection(".even", sel2) + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#n3", "#nf3") +} + +func TestNextFilteredUntilSelectionRollback(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".five") + sel3 := sel.NextFilteredUntilSelection(".even", sel2).End() + assertEqual(t, sel, sel3) +} + +func TestNextFilteredUntilNodes(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".four") + sel = sel.NextFilteredUntilNodes(".odd", sel2.Nodes...) + assertLength(t, sel.Nodes, 4) + assertSelectionIs(t, sel, "#n2", "#n6", "#nf2", "#nf6") +} + +func TestNextFilteredUntilNodesRollback(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".four") + sel3 := sel.NextFilteredUntilNodes(".odd", sel2.Nodes...).End() + assertEqual(t, sel, sel3) +} + +func TestPrevFilteredUntil(t *testing.T) { + sel := Doc2().Find(".five").PrevFilteredUntil(".odd", ".one") + assertLength(t, sel.Nodes, 4) + assertSelectionIs(t, sel, "#n4", "#n2", "#nf4", "#nf2") +} + +func TestPrevFilteredUntilInvalid(t *testing.T) { + sel := Doc2().Find(".five").PrevFilteredUntil("", "") + assertLength(t, sel.Nodes, 0) +} + +func TestPrevFilteredUntilRollback(t *testing.T) { + sel := Doc2().Find(".four") + sel2 := sel.PrevFilteredUntil(".odd", ".one").End() + assertEqual(t, sel, sel2) +} + +func TestPrevFilteredUntilSelection(t *testing.T) { + sel := Doc2().Find(".odd") + sel2 := Doc2().Find(".two") + sel = sel.PrevFilteredUntilSelection(".odd", sel2) + assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#n4", "#nf4") +} + +func TestPrevFilteredUntilSelectionRollback(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".five") + sel3 := sel.PrevFilteredUntilSelection(".even", sel2).End() + assertEqual(t, sel, sel3) +} + +func TestPrevFilteredUntilNodes(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".four") + sel = sel.PrevFilteredUntilNodes(".odd", sel2.Nodes...) 
+ assertLength(t, sel.Nodes, 2) + assertSelectionIs(t, sel, "#n2", "#nf2") +} + +func TestPrevFilteredUntilNodesRollback(t *testing.T) { + sel := Doc2().Find(".even") + sel2 := Doc2().Find(".four") + sel3 := sel.PrevFilteredUntilNodes(".odd", sel2.Nodes...).End() + assertEqual(t, sel, sel3) +} + +func TestClosestItself(t *testing.T) { + sel := Doc2().Find(".three") + sel2 := sel.Closest(".row") + assertLength(t, sel2.Nodes, sel.Length()) + assertSelectionIs(t, sel2, "#n3", "#nf3") +} + +func TestClosestNoDupes(t *testing.T) { + sel := Doc().Find(".span12") + sel2 := sel.Closest(".pvk-content") + assertLength(t, sel2.Nodes, 1) + assertClass(t, sel2, "pvk-content") +} + +func TestClosestNone(t *testing.T) { + sel := Doc().Find("h4") + sel2 := sel.Closest("a") + assertLength(t, sel2.Nodes, 0) +} + +func TestClosestInvalid(t *testing.T) { + sel := Doc().Find("h4") + sel2 := sel.Closest("") + assertLength(t, sel2.Nodes, 0) +} + +func TestClosestMany(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.Closest(".pvk-content") + assertLength(t, sel2.Nodes, 2) + assertSelectionIs(t, sel2, "#pc1", "#pc2") +} + +func TestClosestRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.Closest(".pvk-content").End() + assertEqual(t, sel, sel2) +} + +func TestClosestSelectionItself(t *testing.T) { + sel := Doc2().Find(".three") + sel2 := sel.ClosestSelection(Doc2().Find(".row")) + assertLength(t, sel2.Nodes, sel.Length()) +} + +func TestClosestSelectionNoDupes(t *testing.T) { + sel := Doc().Find(".span12") + sel2 := sel.ClosestSelection(Doc().Find(".pvk-content")) + assertLength(t, sel2.Nodes, 1) + assertClass(t, sel2, "pvk-content") +} + +func TestClosestSelectionNone(t *testing.T) { + sel := Doc().Find("h4") + sel2 := sel.ClosestSelection(Doc().Find("a")) + assertLength(t, sel2.Nodes, 0) +} + +func TestClosestSelectionMany(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ClosestSelection(Doc().Find(".pvk-content")) + assertLength(t, sel2.Nodes, 2) + assertSelectionIs(t, sel2, "#pc1", "#pc2") +} + +func TestClosestSelectionRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ClosestSelection(Doc().Find(".pvk-content")).End() + assertEqual(t, sel, sel2) +} + +func TestClosestNodesItself(t *testing.T) { + sel := Doc2().Find(".three") + sel2 := sel.ClosestNodes(Doc2().Find(".row").Nodes...) + assertLength(t, sel2.Nodes, sel.Length()) +} + +func TestClosestNodesNoDupes(t *testing.T) { + sel := Doc().Find(".span12") + sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...) + assertLength(t, sel2.Nodes, 1) + assertClass(t, sel2, "pvk-content") +} + +func TestClosestNodesNone(t *testing.T) { + sel := Doc().Find("h4") + sel2 := sel.ClosestNodes(Doc().Find("a").Nodes...) + assertLength(t, sel2.Nodes, 0) +} + +func TestClosestNodesMany(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...) 
+ assertLength(t, sel2.Nodes, 2) + assertSelectionIs(t, sel2, "#pc1", "#pc2") +} + +func TestClosestNodesRollback(t *testing.T) { + sel := Doc().Find(".container-fluid") + sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...).End() + assertEqual(t, sel, sel2) +} + +func TestIssue26(t *testing.T) { + img1 := `150x150` + img2 := `150x150` + cases := []struct { + s string + l int + }{ + {s: img1 + img2, l: 2}, + {s: img1, l: 1}, + {s: img2, l: 1}, + } + for _, c := range cases { + doc, err := NewDocumentFromReader(strings.NewReader(c.s)) + if err != nil { + t.Fatal(err) + } + sel := doc.Find("img[src]") + assertLength(t, sel.Nodes, c.l) + } +} diff --git a/vendor/github.com/PuerkitoBio/goquery/type.go b/vendor/github.com/PuerkitoBio/goquery/type.go new file mode 100644 index 00000000..e2169fa3 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/type.go @@ -0,0 +1,135 @@ +package goquery + +import ( + "errors" + "io" + "net/http" + "net/url" + + "github.com/andybalholm/cascadia" + + "golang.org/x/net/html" +) + +// Document represents an HTML document to be manipulated. Unlike jQuery, which +// is loaded as part of a DOM document, and thus acts upon its containing +// document, GoQuery doesn't know which HTML document to act upon. So it needs +// to be told, and that's what the Document class is for. It holds the root +// document node to manipulate, and can make selections on this document. +type Document struct { + *Selection + Url *url.URL + rootNode *html.Node +} + +// NewDocumentFromNode is a Document constructor that takes a root html Node +// as argument. +func NewDocumentFromNode(root *html.Node) *Document { + return newDocument(root, nil) +} + +// NewDocument is a Document constructor that takes a string URL as argument. +// It loads the specified document, parses it, and stores the root Document +// node, ready to be manipulated. +func NewDocument(url string) (*Document, error) { + // Load the URL + res, e := http.Get(url) + if e != nil { + return nil, e + } + return NewDocumentFromResponse(res) +} + +// NewDocumentFromReader returns a Document from a generic reader. +// It returns an error as second value if the reader's data cannot be parsed +// as html. It does *not* check if the reader is also an io.Closer, so the +// provided reader is never closed by this call, it is the responsibility +// of the caller to close it if required. +func NewDocumentFromReader(r io.Reader) (*Document, error) { + root, e := html.Parse(r) + if e != nil { + return nil, e + } + return newDocument(root, nil), nil +} + +// NewDocumentFromResponse is another Document constructor that takes an http response as argument. +// It loads the specified response's document, parses it, and stores the root Document +// node, ready to be manipulated. The response's body is closed on return. +func NewDocumentFromResponse(res *http.Response) (*Document, error) { + if res == nil { + return nil, errors.New("Response is nil") + } + defer res.Body.Close() + if res.Request == nil { + return nil, errors.New("Response.Request is nil") + } + + // Parse the HTML into nodes + root, e := html.Parse(res.Body) + if e != nil { + return nil, e + } + + // Create and fill the document + return newDocument(root, res.Request.URL), nil +} + +// CloneDocument creates a deep-clone of a document. +func CloneDocument(doc *Document) *Document { + return newDocument(cloneNode(doc.rootNode), doc.Url) +} + +// Private constructor, make sure all fields are correctly filled. 
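A brief sketch of the two most common Document constructors above; the in-memory fragment and the example.com URL are placeholders chosen for the example, not values used by this code.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// From an in-memory reader: goquery never closes the reader, the caller
	// stays responsible for it.
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<h1>Title</h1>`))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(doc.Find("h1").Text()) // Title

	// From an HTTP response: NewDocumentFromResponse closes the body on return.
	res, err := http.Get("https://example.com/") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}
	page, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(page.Find("title").Text())
}
```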
+func newDocument(root *html.Node, url *url.URL) *Document { + // Create and fill the document + d := &Document{nil, url, root} + d.Selection = newSingleSelection(root, d) + return d +} + +// Selection represents a collection of nodes matching some criteria. The +// initial Selection can be created by using Document.Find, and then +// manipulated using the jQuery-like chainable syntax and methods. +type Selection struct { + Nodes []*html.Node + document *Document + prevSel *Selection +} + +// Helper constructor to create an empty selection +func newEmptySelection(doc *Document) *Selection { + return &Selection{nil, doc, nil} +} + +// Helper constructor to create a selection of only one node +func newSingleSelection(node *html.Node, doc *Document) *Selection { + return &Selection{[]*html.Node{node}, doc, nil} +} + +// Matcher is an interface that defines the methods to match +// HTML nodes against a compiled selector string. Cascadia's +// Selector implements this interface. +type Matcher interface { + Match(*html.Node) bool + MatchAll(*html.Node) []*html.Node + Filter([]*html.Node) []*html.Node +} + +// compileMatcher compiles the selector string s and returns +// the corresponding Matcher. If s is an invalid selector string, +// it returns a Matcher that fails all matches. +func compileMatcher(s string) Matcher { + cs, err := cascadia.Compile(s) + if err != nil { + return invalidMatcher{} + } + return cs +} + +// invalidMatcher is a Matcher that always fails to match. +type invalidMatcher struct{} + +func (invalidMatcher) Match(n *html.Node) bool { return false } +func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil } +func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil } diff --git a/vendor/github.com/PuerkitoBio/goquery/type_test.go b/vendor/github.com/PuerkitoBio/goquery/type_test.go new file mode 100644 index 00000000..1e82d5ee --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/type_test.go @@ -0,0 +1,202 @@ +package goquery + +import ( + "bytes" + "fmt" + "os" + "strings" + "testing" + + "golang.org/x/net/html" +) + +// Test helper functions and members +var doc *Document +var doc2 *Document +var doc3 *Document +var docB *Document +var docW *Document + +func Doc() *Document { + if doc == nil { + doc = loadDoc("page.html") + } + return doc +} + +func Doc2() *Document { + if doc2 == nil { + doc2 = loadDoc("page2.html") + } + return doc2 +} + +func Doc2Clone() *Document { + return CloneDocument(Doc2()) +} + +func Doc3() *Document { + if doc3 == nil { + doc3 = loadDoc("page3.html") + } + return doc3 +} + +func Doc3Clone() *Document { + return CloneDocument(Doc3()) +} + +func DocB() *Document { + if docB == nil { + docB = loadDoc("gotesting.html") + } + return docB +} + +func DocW() *Document { + if docW == nil { + docW = loadDoc("gowiki.html") + } + return docW +} + +func assertLength(t *testing.T, nodes []*html.Node, length int) { + if len(nodes) != length { + t.Errorf("Expected %d nodes, found %d.", length, len(nodes)) + for i, n := range nodes { + t.Logf("Node %d: %+v.", i, n) + } + } +} + +func assertClass(t *testing.T, sel *Selection, class string) { + if !sel.HasClass(class) { + t.Errorf("Expected node to have class %s, found %+v.", class, sel.Get(0)) + } +} + +func assertPanic(t *testing.T) { + if e := recover(); e == nil { + t.Error("Expected a panic.") + } +} + +func assertEqual(t *testing.T, s1 *Selection, s2 *Selection) { + if s1 != s2 { + t.Error("Expected selection objects to be the same.") + } +} + +func assertSelectionIs(t 
*testing.T, sel *Selection, is ...string) { + for i := 0; i < sel.Length(); i++ { + if !sel.Eq(i).Is(is[i]) { + t.Errorf("Expected node %d to be %s, found %+v", i, is[i], sel.Get(i)) + } + } +} + +func printSel(t *testing.T, sel *Selection) { + if testing.Verbose() { + h, err := sel.Html() + if err != nil { + t.Fatal(err) + } + t.Log(h) + } +} + +func loadDoc(page string) *Document { + var f *os.File + var e error + + if f, e = os.Open(fmt.Sprintf("./testdata/%s", page)); e != nil { + panic(e.Error()) + } + defer f.Close() + + var node *html.Node + if node, e = html.Parse(f); e != nil { + panic(e.Error()) + } + return NewDocumentFromNode(node) +} + +func TestNewDocument(t *testing.T) { + if f, e := os.Open("./testdata/page.html"); e != nil { + t.Error(e.Error()) + } else { + defer f.Close() + if node, e := html.Parse(f); e != nil { + t.Error(e.Error()) + } else { + doc = NewDocumentFromNode(node) + } + } +} + +func TestNewDocumentFromReader(t *testing.T) { + cases := []struct { + src string + err bool + sel string + cnt int + }{ + 0: { + src: ` + + +Test + +

<h1>Hi</h1>

+ +`, + sel: "h1", + cnt: 1, + }, + 1: { + // Actually pretty hard to make html.Parse return an error + // based on content... + src: `>>qq>`, + }, + } + buf := bytes.NewBuffer(nil) + + for i, c := range cases { + buf.Reset() + buf.WriteString(c.src) + + d, e := NewDocumentFromReader(buf) + if (e != nil) != c.err { + if c.err { + t.Errorf("[%d] - expected error, got none", i) + } else { + t.Errorf("[%d] - expected no error, got %s", i, e) + } + } + if c.sel != "" { + s := d.Find(c.sel) + if s.Length() != c.cnt { + t.Errorf("[%d] - expected %d nodes, found %d", i, c.cnt, s.Length()) + } + } + } +} + +func TestNewDocumentFromResponseNil(t *testing.T) { + _, e := NewDocumentFromResponse(nil) + if e == nil { + t.Error("Expected error, got none") + } +} + +func TestIssue103(t *testing.T) { + d, err := NewDocumentFromReader(strings.NewReader("Scientists Stored These Images in DNA—Then Flawlessly Retrieved Them")) + if err != nil { + t.Error(err) + } + text := d.Find("title").Text() + for i, r := range text { + t.Logf("%d: %d - %q\n", i, r, string(r)) + } + t.Log(text) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/utilities.go b/vendor/github.com/PuerkitoBio/goquery/utilities.go new file mode 100644 index 00000000..b4c061a4 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/utilities.go @@ -0,0 +1,161 @@ +package goquery + +import ( + "bytes" + + "golang.org/x/net/html" +) + +// used to determine if a set (map[*html.Node]bool) should be used +// instead of iterating over a slice. The set uses more memory and +// is slower than slice iteration for small N. +const minNodesForSet = 1000 + +var nodeNames = []string{ + html.ErrorNode: "#error", + html.TextNode: "#text", + html.DocumentNode: "#document", + html.CommentNode: "#comment", +} + +// NodeName returns the node name of the first element in the selection. +// It tries to behave in a similar way as the DOM's nodeName property +// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName). +// +// Go's net/html package defines the following node types, listed with +// the corresponding returned value from this function: +// +// ErrorNode : #error +// TextNode : #text +// DocumentNode : #document +// ElementNode : the element's tag name +// CommentNode : #comment +// DoctypeNode : the name of the document type +// +func NodeName(s *Selection) string { + if s.Length() == 0 { + return "" + } + switch n := s.Get(0); n.Type { + case html.ElementNode, html.DoctypeNode: + return n.Data + default: + if n.Type >= 0 && int(n.Type) < len(nodeNames) { + return nodeNames[n.Type] + } + return "" + } +} + +// OuterHtml returns the outer HTML rendering of the first item in +// the selection - that is, the HTML including the first element's +// tag and attributes. +// +// Unlike InnerHtml, this is a function and not a method on the Selection, +// because this is not a jQuery method (in javascript-land, this is +// a property provided by the DOM). +func OuterHtml(s *Selection) (string, error) { + var buf bytes.Buffer + + if s.Length() == 0 { + return "", nil + } + n := s.Get(0) + if err := html.Render(&buf, n); err != nil { + return "", err + } + return buf.String(), nil +} + +// Loop through all container nodes to search for the target node. +func sliceContains(container []*html.Node, contained *html.Node) bool { + for _, n := range container { + if nodeContains(n, contained) { + return true + } + } + + return false +} + +// Checks if the contained node is within the container node. 
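A small sketch of the NodeName and OuterHtml helpers defined above; the paragraph markup is an invented example.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	const page = `<p>hello <!-- hidden --> world</p>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}
	p := doc.Find("p")

	// NodeName mirrors the DOM nodeName property: the tag name for elements,
	// "#text", "#comment", etc. for the other node types.
	fmt.Println(goquery.NodeName(p))                    // p
	fmt.Println(goquery.NodeName(p.Contents().First())) // #text

	// OuterHtml is a package-level function rather than a Selection method,
	// because outerHTML is a DOM property, not a jQuery method.
	out, err := goquery.OuterHtml(p)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // <p>hello <!-- hidden --> world</p>
}
```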
+func nodeContains(container *html.Node, contained *html.Node) bool { + // Check if the parent of the contained node is the container node, traversing + // upward until the top is reached, or the container is found. + for contained = contained.Parent; contained != nil; contained = contained.Parent { + if container == contained { + return true + } + } + return false +} + +// Checks if the target node is in the slice of nodes. +func isInSlice(slice []*html.Node, node *html.Node) bool { + return indexInSlice(slice, node) > -1 +} + +// Returns the index of the target node in the slice, or -1. +func indexInSlice(slice []*html.Node, node *html.Node) int { + if node != nil { + for i, n := range slice { + if n == node { + return i + } + } + } + return -1 +} + +// Appends the new nodes to the target slice, making sure no duplicate is added. +// There is no check to the original state of the target slice, so it may still +// contain duplicates. The target slice is returned because append() may create +// a new underlying array. If targetSet is nil, a local set is created with the +// target if len(target) + len(nodes) is greater than minNodesForSet. +func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node { + // if there are not that many nodes, don't use the map, faster to just use nested loops + // (unless a non-nil targetSet is passed, in which case the caller knows better). + if targetSet == nil && len(target)+len(nodes) < minNodesForSet { + for _, n := range nodes { + if !isInSlice(target, n) { + target = append(target, n) + } + } + return target + } + + // if a targetSet is passed, then assume it is reliable, otherwise create one + // and initialize it with the current target contents. + if targetSet == nil { + targetSet = make(map[*html.Node]bool, len(target)) + for _, n := range target { + targetSet[n] = true + } + } + for _, n := range nodes { + if !targetSet[n] { + target = append(target, n) + targetSet[n] = true + } + } + + return target +} + +// Loop through a selection, returning only those nodes that pass the predicate +// function. +func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) { + for i, n := range sel.Nodes { + if predicate(i, newSingleSelection(n, sel.document)) { + result = append(result, n) + } + } + return result +} + +// Creates a new Selection object based on the specified nodes, and keeps the +// source Selection object on the stack (linked list). +func pushStack(fromSel *Selection, nodes []*html.Node) *Selection { + result := &Selection{nodes, fromSel.document, fromSel} + return result +} diff --git a/vendor/github.com/PuerkitoBio/goquery/utilities_test.go b/vendor/github.com/PuerkitoBio/goquery/utilities_test.go new file mode 100644 index 00000000..c8e9d540 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/utilities_test.go @@ -0,0 +1,128 @@ +package goquery + +import ( + "reflect" + "sort" + "strings" + "testing" + + "golang.org/x/net/html" +) + +var allNodes = ` + + + + + +

+ This is some text. +

+
+

+

+ +` + +func TestNodeName(t *testing.T) { + doc, err := NewDocumentFromReader(strings.NewReader(allNodes)) + if err != nil { + t.Fatal(err) + } + + n0 := doc.Nodes[0] + nDT := n0.FirstChild + sMeta := doc.Find("meta") + nMeta := sMeta.Get(0) + sP := doc.Find("p") + nP := sP.Get(0) + nComment := nP.FirstChild + nText := nComment.NextSibling + + cases := []struct { + node *html.Node + typ html.NodeType + want string + }{ + {n0, html.DocumentNode, nodeNames[html.DocumentNode]}, + {nDT, html.DoctypeNode, "html"}, + {nMeta, html.ElementNode, "meta"}, + {nP, html.ElementNode, "p"}, + {nComment, html.CommentNode, nodeNames[html.CommentNode]}, + {nText, html.TextNode, nodeNames[html.TextNode]}, + } + for i, c := range cases { + got := NodeName(newSingleSelection(c.node, doc)) + if c.node.Type != c.typ { + t.Errorf("%d: want type %v, got %v", i, c.typ, c.node.Type) + } + if got != c.want { + t.Errorf("%d: want %q, got %q", i, c.want, got) + } + } +} + +func TestNodeNameMultiSel(t *testing.T) { + doc, err := NewDocumentFromReader(strings.NewReader(allNodes)) + if err != nil { + t.Fatal(err) + } + + in := []string{"p", "h1", "div"} + var out []string + doc.Find(strings.Join(in, ", ")).Each(func(i int, s *Selection) { + got := NodeName(s) + out = append(out, got) + }) + sort.Strings(in) + sort.Strings(out) + if !reflect.DeepEqual(in, out) { + t.Errorf("want %v, got %v", in, out) + } +} + +func TestOuterHtml(t *testing.T) { + doc, err := NewDocumentFromReader(strings.NewReader(allNodes)) + if err != nil { + t.Fatal(err) + } + + n0 := doc.Nodes[0] + nDT := n0.FirstChild + sMeta := doc.Find("meta") + sP := doc.Find("p") + nP := sP.Get(0) + nComment := nP.FirstChild + nText := nComment.NextSibling + sHeaders := doc.Find(".header") + + cases := []struct { + node *html.Node + sel *Selection + want string + }{ + {nDT, nil, ""}, // render makes DOCTYPE all caps + {nil, sMeta, ``}, // and auto-closes the meta + {nil, sP, `

+ This is some text. +

`}, + {nComment, nil, ""}, + {nText, nil, ` + This is some text. + `}, + {nil, sHeaders, `

`}, + } + for i, c := range cases { + if c.sel == nil { + c.sel = newSingleSelection(c.node, doc) + } + got, err := OuterHtml(c.sel) + if err != nil { + t.Fatal(err) + } + + if got != c.want { + t.Errorf("%d: want %q, got %q", i, c.want, got) + } + } +} diff --git a/vendor/github.com/andybalholm/cascadia/.travis.yml b/vendor/github.com/andybalholm/cascadia/.travis.yml new file mode 100644 index 00000000..6f227517 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/.travis.yml @@ -0,0 +1,14 @@ +language: go + +go: + - 1.3 + - 1.4 + +install: + - go get github.com/andybalholm/cascadia + +script: + - go test -v + +notifications: + email: false diff --git a/vendor/github.com/andybalholm/cascadia/LICENSE b/vendor/github.com/andybalholm/cascadia/LICENSE new file mode 100755 index 00000000..ee5ad35a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2011 Andy Balholm. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/andybalholm/cascadia/README.md b/vendor/github.com/andybalholm/cascadia/README.md new file mode 100644 index 00000000..9021cb92 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/README.md @@ -0,0 +1,7 @@ +# cascadia + +[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia) + +The Cascadia package implements CSS selectors for use with the parse trees produced by the html package. + +To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package. diff --git a/vendor/github.com/andybalholm/cascadia/benchmark_test.go b/vendor/github.com/andybalholm/cascadia/benchmark_test.go new file mode 100644 index 00000000..42bf5000 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/benchmark_test.go @@ -0,0 +1,53 @@ +package cascadia + +import ( + "strings" + "testing" + + "golang.org/x/net/html" +) + +func MustParseHTML(doc string) *html.Node { + dom, err := html.Parse(strings.NewReader(doc)) + if err != nil { + panic(err) + } + return dom +} + +var selector = MustCompile(`div.matched`) +var doc = ` + + +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + +` +var dom = MustParseHTML(doc) + +func BenchmarkMatchAll(b *testing.B) { + var matches []*html.Node + for i := 0; i < b.N; i++ { + matches = selector.MatchAll(dom) + } + _ = matches +} diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test0 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test0 new file mode 100644 index 00000000..83a6561a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test0 @@ -0,0 +1 @@ +address \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test1 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test1 new file mode 100644 index 00000000..f59ec20a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test1 @@ -0,0 +1 @@ +* \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test10 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test10 new file mode 100644 index 00000000..0ee70eb0 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test10 @@ -0,0 +1 @@ +p[title] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test11 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test11 new file mode 100644 index 00000000..66340c4a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test11 @@ -0,0 +1 @@ +address[title="foo"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test12 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test12 new file mode 100644 index 00000000..6303f086 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test12 @@ -0,0 +1 @@ +[ title ~= foo ] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test13 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test13 new file mode 100644 index 00000000..45d91eb6 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test13 @@ -0,0 +1 @@ +[title~="hello world"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test14 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test14 new file mode 100644 index 00000000..62e7d68a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test14 @@ -0,0 +1 @@ +[lang|="en"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test15 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test15 new file mode 100644 index 00000000..fe9ab530 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test15 @@ -0,0 +1 @@ +[title^="foo"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test16 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test16 new file mode 100644 index 00000000..dbee7332 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test16 @@ -0,0 +1 @@ +[title$="bar"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test17 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test17 new file mode 100644 index 00000000..e4fb4032 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test17 @@ -0,0 +1 @@ +[title*="bar"] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test18 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test18 new file mode 100644 index 00000000..60750143 --- /dev/null +++ 
b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test18 @@ -0,0 +1 @@ +.t1:not(.t2) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test19 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test19 new file mode 100644 index 00000000..f04dfafd --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test19 @@ -0,0 +1 @@ +div:not(.t1) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test2 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test2 new file mode 100644 index 00000000..5529b9b4 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test2 @@ -0,0 +1 @@ +#foo \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test20 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test20 new file mode 100644 index 00000000..a1c88349 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test20 @@ -0,0 +1 @@ +li:nth-child(odd) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test21 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test21 new file mode 100644 index 00000000..b99fcb6a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test21 @@ -0,0 +1 @@ +li:nth-child(even) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test22 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test22 new file mode 100644 index 00000000..a9ee2179 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test22 @@ -0,0 +1 @@ +li:nth-child(-n+2) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test23 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test23 new file mode 100644 index 00000000..aad519d5 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test23 @@ -0,0 +1 @@ +li:nth-child(3n+1) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test24 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test24 new file mode 100644 index 00000000..436a2191 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test24 @@ -0,0 +1 @@ +li:nth-last-child(odd) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test25 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test25 new file mode 100644 index 00000000..46f6cbc2 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test25 @@ -0,0 +1 @@ +li:nth-last-child(even) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test26 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test26 new file mode 100644 index 00000000..d18bf3b4 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test26 @@ -0,0 +1 @@ +li:nth-last-child(-n+2) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test27 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test27 new file mode 100644 index 00000000..d1d6eb31 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test27 @@ -0,0 +1 @@ +li:nth-last-child(3n+1) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test28 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test28 new file mode 100644 index 00000000..9a0c9493 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test28 @@ -0,0 +1 @@ 
+span:first-child \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test29 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test29 new file mode 100644 index 00000000..7058608e --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test29 @@ -0,0 +1 @@ +span:last-child \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test3 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test3 new file mode 100644 index 00000000..ee024825 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test3 @@ -0,0 +1 @@ +li#t1 \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test30 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test30 new file mode 100644 index 00000000..536e6ffb --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test30 @@ -0,0 +1 @@ +p:nth-of-type(2) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test31 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test31 new file mode 100644 index 00000000..61c1fc75 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test31 @@ -0,0 +1 @@ +p:nth-last-of-type(2) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test32 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test32 new file mode 100644 index 00000000..d9a9dc54 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test32 @@ -0,0 +1 @@ +p:last-of-type \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test33 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test33 new file mode 100644 index 00000000..9052c419 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test33 @@ -0,0 +1 @@ +p:first-of-type \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test34 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test34 new file mode 100644 index 00000000..60bd1247 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test34 @@ -0,0 +1 @@ +p:only-child \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test35 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test35 new file mode 100644 index 00000000..87d9dbc6 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test35 @@ -0,0 +1 @@ +p:only-of-type \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test36 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test36 new file mode 100644 index 00000000..8e929542 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test36 @@ -0,0 +1 @@ +:empty \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test37 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test37 new file mode 100644 index 00000000..ba3455f5 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test37 @@ -0,0 +1 @@ +div p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test38 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test38 new file mode 100644 index 00000000..d6f24c0a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test38 @@ -0,0 +1 @@ +div table p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test39 
b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test39 new file mode 100644 index 00000000..a72a605c --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test39 @@ -0,0 +1 @@ +div > p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test4 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test4 new file mode 100644 index 00000000..7b253d3e --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test4 @@ -0,0 +1 @@ +*#t4 \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test40 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test40 new file mode 100644 index 00000000..407ea3c4 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test40 @@ -0,0 +1 @@ +p ~ p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test41 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test41 new file mode 100644 index 00000000..e36e0942 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test41 @@ -0,0 +1 @@ +p + p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test42 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test42 new file mode 100644 index 00000000..fa59ada0 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test42 @@ -0,0 +1 @@ +li, p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test43 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test43 new file mode 100644 index 00000000..e946ff3a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test43 @@ -0,0 +1 @@ +p +/*This is a comment*/ p \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test44 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test44 new file mode 100644 index 00000000..df68954a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test44 @@ -0,0 +1 @@ +p:contains("that wraps") \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test45 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test45 new file mode 100644 index 00000000..5c479a9a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test45 @@ -0,0 +1 @@ +p:containsOwn("that wraps") \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test46 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test46 new file mode 100644 index 00000000..a189d03c --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test46 @@ -0,0 +1 @@ +:containsOwn("inner") \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test47 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test47 new file mode 100644 index 00000000..bfba3689 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test47 @@ -0,0 +1 @@ +p:containsOwn("block") \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test48 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test48 new file mode 100644 index 00000000..4a6abb3c --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test48 @@ -0,0 +1 @@ +div:has(#p1) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test49 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test49 new file mode 100644 index 00000000..2048a0cd --- 
/dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test49 @@ -0,0 +1 @@ +div:has(:containsOwn("2")) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test5 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test5 new file mode 100644 index 00000000..702c0f1e --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test5 @@ -0,0 +1 @@ +.t1 \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test50 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test50 new file mode 100644 index 00000000..c062e4c1 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test50 @@ -0,0 +1 @@ +body :has(:containsOwn("2")) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test51 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test51 new file mode 100644 index 00000000..15122604 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test51 @@ -0,0 +1 @@ +body :haschild(:containsOwn("2")) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test52 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test52 new file mode 100644 index 00000000..c3dee62a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test52 @@ -0,0 +1 @@ +p:matches([\d]) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test53 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test53 new file mode 100644 index 00000000..90f71d78 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test53 @@ -0,0 +1 @@ +p:matches([a-z]) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test54 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test54 new file mode 100644 index 00000000..88b4c283 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test54 @@ -0,0 +1 @@ +p:matches([a-zA-Z]) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test55 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test55 new file mode 100644 index 00000000..699b41ff --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test55 @@ -0,0 +1 @@ +p:matches([^\d]) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test56 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test56 new file mode 100644 index 00000000..83d4c474 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test56 @@ -0,0 +1 @@ +p:matches(^(0|a)) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test57 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test57 new file mode 100644 index 00000000..e8507ba6 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test57 @@ -0,0 +1 @@ +p:matches(^\d+$) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test58 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test58 new file mode 100644 index 00000000..e29dba9b --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test58 @@ -0,0 +1 @@ +p:not(:matches(^\d+$)) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test59 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test59 new file mode 100644 index 00000000..b5f72069 --- /dev/null +++ 
b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test59 @@ -0,0 +1 @@ +div :matchesOwn(^\d+$) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test6 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test6 new file mode 100644 index 00000000..cf58afcc --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test6 @@ -0,0 +1 @@ +p.t1 \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test60 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test60 new file mode 100644 index 00000000..2154ba88 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test60 @@ -0,0 +1 @@ +[href#=(fina)]:not([href#=(\/\/[^\/]+untrusted)]) \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test61 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test61 new file mode 100644 index 00000000..1c8f5258 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test61 @@ -0,0 +1 @@ +[href#=(^https:\/\/[^\/]*\/?news)] \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test7 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test7 new file mode 100644 index 00000000..9ad6c209 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test7 @@ -0,0 +1 @@ +div.teST \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test8 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test8 new file mode 100644 index 00000000..2ed12842 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test8 @@ -0,0 +1 @@ +.t1.fail \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test9 b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test9 new file mode 100644 index 00000000..8c1c2a27 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/corpus/test9 @@ -0,0 +1 @@ +p.t1.t2 \ No newline at end of file diff --git a/vendor/github.com/andybalholm/cascadia/fuzz/fuzz.go b/vendor/github.com/andybalholm/cascadia/fuzz/fuzz.go new file mode 100644 index 00000000..d1a46f8c --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/fuzz/fuzz.go @@ -0,0 +1,15 @@ +package fuzz + +import "github.com/andybalholm/cascadia" + +// Fuzz is the entrypoint used by the go-fuzz framework +func Fuzz(data []byte) int { + sel, err := cascadia.Compile(string(data)) + if err != nil { + if sel != nil { + panic("sel != nil on error") + } + return 0 + } + return 1 +} diff --git a/vendor/github.com/andybalholm/cascadia/parser.go b/vendor/github.com/andybalholm/cascadia/parser.go new file mode 100644 index 00000000..495db9cc --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/parser.go @@ -0,0 +1,835 @@ +// Package cascadia is an implementation of CSS selectors. +package cascadia + +import ( + "errors" + "fmt" + "regexp" + "strconv" + "strings" + + "golang.org/x/net/html" +) + +// a parser for CSS selectors +type parser struct { + s string // the source text + i int // the current position +} + +// parseEscape parses a backslash escape. 
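+//
+// For example, a hex escape such as `\e9 ` would decode to the single rune
+// "é" (0xE9), with the one whitespace character that terminates the escape
+// consumed along with it, while a non-hex escape such as `\"` simply yields
+// the literal character that follows the backslash.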
+func (p *parser) parseEscape() (result string, err error) { + if len(p.s) < p.i+2 || p.s[p.i] != '\\' { + return "", errors.New("invalid escape sequence") + } + + start := p.i + 1 + c := p.s[start] + switch { + case c == '\r' || c == '\n' || c == '\f': + return "", errors.New("escaped line ending outside string") + case hexDigit(c): + // unicode escape (hex) + var i int + for i = start; i < p.i+6 && i < len(p.s) && hexDigit(p.s[i]); i++ { + // empty + } + v, _ := strconv.ParseUint(p.s[start:i], 16, 21) + if len(p.s) > i { + switch p.s[i] { + case '\r': + i++ + if len(p.s) > i && p.s[i] == '\n' { + i++ + } + case ' ', '\t', '\n', '\f': + i++ + } + } + p.i = i + return string(rune(v)), nil + } + + // Return the literal character after the backslash. + result = p.s[start : start+1] + p.i += 2 + return result, nil +} + +func hexDigit(c byte) bool { + return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' +} + +// nameStart returns whether c can be the first character of an identifier +// (not counting an initial hyphen, or an escape sequence). +func nameStart(c byte) bool { + return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 +} + +// nameChar returns whether c can be a character within an identifier +// (not counting an escape sequence). +func nameChar(c byte) bool { + return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 || + c == '-' || '0' <= c && c <= '9' +} + +// parseIdentifier parses an identifier. +func (p *parser) parseIdentifier() (result string, err error) { + startingDash := false + if len(p.s) > p.i && p.s[p.i] == '-' { + startingDash = true + p.i++ + } + + if len(p.s) <= p.i { + return "", errors.New("expected identifier, found EOF instead") + } + + if c := p.s[p.i]; !(nameStart(c) || c == '\\') { + return "", fmt.Errorf("expected identifier, found %c instead", c) + } + + result, err = p.parseName() + if startingDash && err == nil { + result = "-" + result + } + return +} + +// parseName parses a name (which is like an identifier, but doesn't have +// extra restrictions on the first character). +func (p *parser) parseName() (result string, err error) { + i := p.i +loop: + for i < len(p.s) { + c := p.s[i] + switch { + case nameChar(c): + start := i + for i < len(p.s) && nameChar(p.s[i]) { + i++ + } + result += p.s[start:i] + case c == '\\': + p.i = i + val, err := p.parseEscape() + if err != nil { + return "", err + } + i = p.i + result += val + default: + break loop + } + } + + if result == "" { + return "", errors.New("expected name, found EOF instead") + } + + p.i = i + return result, nil +} + +// parseString parses a single- or double-quoted string. 
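+//
+// For example, the input `"a\"b"` would be consumed through the closing
+// quote and yield the value `a"b`, while an unterminated string such as
+// `'x` would fail with an "EOF in string" error.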
+func (p *parser) parseString() (result string, err error) { + i := p.i + if len(p.s) < i+2 { + return "", errors.New("expected string, found EOF instead") + } + + quote := p.s[i] + i++ + +loop: + for i < len(p.s) { + switch p.s[i] { + case '\\': + if len(p.s) > i+1 { + switch c := p.s[i+1]; c { + case '\r': + if len(p.s) > i+2 && p.s[i+2] == '\n' { + i += 3 + continue loop + } + fallthrough + case '\n', '\f': + i += 2 + continue loop + } + } + p.i = i + val, err := p.parseEscape() + if err != nil { + return "", err + } + i = p.i + result += val + case quote: + break loop + case '\r', '\n', '\f': + return "", errors.New("unexpected end of line in string") + default: + start := i + for i < len(p.s) { + if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' { + break + } + i++ + } + result += p.s[start:i] + } + } + + if i >= len(p.s) { + return "", errors.New("EOF in string") + } + + // Consume the final quote. + i++ + + p.i = i + return result, nil +} + +// parseRegex parses a regular expression; the end is defined by encountering an +// unmatched closing ')' or ']' which is not consumed +func (p *parser) parseRegex() (rx *regexp.Regexp, err error) { + i := p.i + if len(p.s) < i+2 { + return nil, errors.New("expected regular expression, found EOF instead") + } + + // number of open parens or brackets; + // when it becomes negative, finished parsing regex + open := 0 + +loop: + for i < len(p.s) { + switch p.s[i] { + case '(', '[': + open++ + case ')', ']': + open-- + if open < 0 { + break loop + } + } + i++ + } + + if i >= len(p.s) { + return nil, errors.New("EOF in regular expression") + } + rx, err = regexp.Compile(p.s[p.i:i]) + p.i = i + return rx, err +} + +// skipWhitespace consumes whitespace characters and comments. +// It returns true if there was actually anything to skip. +func (p *parser) skipWhitespace() bool { + i := p.i + for i < len(p.s) { + switch p.s[i] { + case ' ', '\t', '\r', '\n', '\f': + i++ + continue + case '/': + if strings.HasPrefix(p.s[i:], "/*") { + end := strings.Index(p.s[i+len("/*"):], "*/") + if end != -1 { + i += end + len("/**/") + continue + } + } + } + break + } + + if i > p.i { + p.i = i + return true + } + + return false +} + +// consumeParenthesis consumes an opening parenthesis and any following +// whitespace. It returns true if there was actually a parenthesis to skip. +func (p *parser) consumeParenthesis() bool { + if p.i < len(p.s) && p.s[p.i] == '(' { + p.i++ + p.skipWhitespace() + return true + } + return false +} + +// consumeClosingParenthesis consumes a closing parenthesis and any preceding +// whitespace. It returns true if there was actually a parenthesis to skip. +func (p *parser) consumeClosingParenthesis() bool { + i := p.i + p.skipWhitespace() + if p.i < len(p.s) && p.s[p.i] == ')' { + p.i++ + return true + } + p.i = i + return false +} + +// parseTypeSelector parses a type selector (one that matches by tag name). +func (p *parser) parseTypeSelector() (result Selector, err error) { + tag, err := p.parseIdentifier() + if err != nil { + return nil, err + } + + return typeSelector(tag), nil +} + +// parseIDSelector parses a selector that matches by id attribute. 
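+//
+// For example, `#foo` would be parsed into attributeEqualsSelector("id", "foo"),
+// matching any element whose id attribute equals "foo".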
+func (p *parser) parseIDSelector() (Selector, error) { + if p.i >= len(p.s) { + return nil, fmt.Errorf("expected id selector (#id), found EOF instead") + } + if p.s[p.i] != '#' { + return nil, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i]) + } + + p.i++ + id, err := p.parseName() + if err != nil { + return nil, err + } + + return attributeEqualsSelector("id", id), nil +} + +// parseClassSelector parses a selector that matches by class attribute. +func (p *parser) parseClassSelector() (Selector, error) { + if p.i >= len(p.s) { + return nil, fmt.Errorf("expected class selector (.class), found EOF instead") + } + if p.s[p.i] != '.' { + return nil, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i]) + } + + p.i++ + class, err := p.parseIdentifier() + if err != nil { + return nil, err + } + + return attributeIncludesSelector("class", class), nil +} + +// parseAttributeSelector parses a selector that matches by attribute value. +func (p *parser) parseAttributeSelector() (Selector, error) { + if p.i >= len(p.s) { + return nil, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead") + } + if p.s[p.i] != '[' { + return nil, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i]) + } + + p.i++ + p.skipWhitespace() + key, err := p.parseIdentifier() + if err != nil { + return nil, err + } + + p.skipWhitespace() + if p.i >= len(p.s) { + return nil, errors.New("unexpected EOF in attribute selector") + } + + if p.s[p.i] == ']' { + p.i++ + return attributeExistsSelector(key), nil + } + + if p.i+2 >= len(p.s) { + return nil, errors.New("unexpected EOF in attribute selector") + } + + op := p.s[p.i : p.i+2] + if op[0] == '=' { + op = "=" + } else if op[1] != '=' { + return nil, fmt.Errorf(`expected equality operator, found "%s" instead`, op) + } + p.i += len(op) + + p.skipWhitespace() + if p.i >= len(p.s) { + return nil, errors.New("unexpected EOF in attribute selector") + } + var val string + var rx *regexp.Regexp + if op == "#=" { + rx, err = p.parseRegex() + } else { + switch p.s[p.i] { + case '\'', '"': + val, err = p.parseString() + default: + val, err = p.parseIdentifier() + } + } + if err != nil { + return nil, err + } + + p.skipWhitespace() + if p.i >= len(p.s) { + return nil, errors.New("unexpected EOF in attribute selector") + } + if p.s[p.i] != ']' { + return nil, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i]) + } + p.i++ + + switch op { + case "=": + return attributeEqualsSelector(key, val), nil + case "!=": + return attributeNotEqualSelector(key, val), nil + case "~=": + return attributeIncludesSelector(key, val), nil + case "|=": + return attributeDashmatchSelector(key, val), nil + case "^=": + return attributePrefixSelector(key, val), nil + case "$=": + return attributeSuffixSelector(key, val), nil + case "*=": + return attributeSubstringSelector(key, val), nil + case "#=": + return attributeRegexSelector(key, rx), nil + } + + return nil, fmt.Errorf("attribute operator %q is not supported", op) +} + +var errExpectedParenthesis = errors.New("expected '(' but didn't find it") +var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it") +var errUnmatchedParenthesis = errors.New("unmatched '('") + +// parsePseudoclassSelector parses a pseudoclass selector like :not(p). 
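+//
+// For example, `:not(.t1)` would wrap the inner class selector in
+// negatedSelector, `:has(#p1)` would become a hasDescendantSelector, and
+// `:nth-child(odd)` would be read as the an+b form 2n+1 and compiled via
+// nthChildSelector(2, 1, false, false).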
+func (p *parser) parsePseudoclassSelector() (Selector, error) { + if p.i >= len(p.s) { + return nil, fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead") + } + if p.s[p.i] != ':' { + return nil, fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i]) + } + + p.i++ + name, err := p.parseIdentifier() + if err != nil { + return nil, err + } + name = toLowerASCII(name) + + switch name { + case "not", "has", "haschild": + if !p.consumeParenthesis() { + return nil, errExpectedParenthesis + } + sel, parseErr := p.parseSelectorGroup() + if parseErr != nil { + return nil, parseErr + } + if !p.consumeClosingParenthesis() { + return nil, errExpectedClosingParenthesis + } + + switch name { + case "not": + return negatedSelector(sel), nil + case "has": + return hasDescendantSelector(sel), nil + case "haschild": + return hasChildSelector(sel), nil + } + + case "contains", "containsown": + if !p.consumeParenthesis() { + return nil, errExpectedParenthesis + } + if p.i == len(p.s) { + return nil, errUnmatchedParenthesis + } + var val string + switch p.s[p.i] { + case '\'', '"': + val, err = p.parseString() + default: + val, err = p.parseIdentifier() + } + if err != nil { + return nil, err + } + val = strings.ToLower(val) + p.skipWhitespace() + if p.i >= len(p.s) { + return nil, errors.New("unexpected EOF in pseudo selector") + } + if !p.consumeClosingParenthesis() { + return nil, errExpectedClosingParenthesis + } + + switch name { + case "contains": + return textSubstrSelector(val), nil + case "containsown": + return ownTextSubstrSelector(val), nil + } + + case "matches", "matchesown": + if !p.consumeParenthesis() { + return nil, errExpectedParenthesis + } + rx, err := p.parseRegex() + if err != nil { + return nil, err + } + if p.i >= len(p.s) { + return nil, errors.New("unexpected EOF in pseudo selector") + } + if !p.consumeClosingParenthesis() { + return nil, errExpectedClosingParenthesis + } + + switch name { + case "matches": + return textRegexSelector(rx), nil + case "matchesown": + return ownTextRegexSelector(rx), nil + } + + case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type": + if !p.consumeParenthesis() { + return nil, errExpectedParenthesis + } + a, b, err := p.parseNth() + if err != nil { + return nil, err + } + if !p.consumeClosingParenthesis() { + return nil, errExpectedClosingParenthesis + } + if a == 0 { + switch name { + case "nth-child": + return simpleNthChildSelector(b, false), nil + case "nth-of-type": + return simpleNthChildSelector(b, true), nil + case "nth-last-child": + return simpleNthLastChildSelector(b, false), nil + case "nth-last-of-type": + return simpleNthLastChildSelector(b, true), nil + } + } + return nthChildSelector(a, b, + name == "nth-last-child" || name == "nth-last-of-type", + name == "nth-of-type" || name == "nth-last-of-type"), + nil + + case "first-child": + return simpleNthChildSelector(1, false), nil + case "last-child": + return simpleNthLastChildSelector(1, false), nil + case "first-of-type": + return simpleNthChildSelector(1, true), nil + case "last-of-type": + return simpleNthLastChildSelector(1, true), nil + case "only-child": + return onlyChildSelector(false), nil + case "only-of-type": + return onlyChildSelector(true), nil + case "input": + return inputSelector, nil + case "empty": + return emptyElementSelector, nil + case "root": + return rootSelector, nil + } + + return nil, fmt.Errorf("unknown pseudoclass :%s", name) +} + +// parseInteger parses a decimal integer. 
+func (p *parser) parseInteger() (int, error) { + i := p.i + start := i + for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' { + i++ + } + if i == start { + return 0, errors.New("expected integer, but didn't find it") + } + p.i = i + + val, err := strconv.Atoi(p.s[start:i]) + if err != nil { + return 0, err + } + + return val, nil +} + +// parseNth parses the argument for :nth-child (normally of the form an+b). +func (p *parser) parseNth() (a, b int, err error) { + // initial state + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '-': + p.i++ + goto negativeA + case '+': + p.i++ + goto positiveA + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + goto positiveA + case 'n', 'N': + a = 1 + p.i++ + goto readN + case 'o', 'O', 'e', 'E': + id, nameErr := p.parseName() + if nameErr != nil { + return 0, 0, nameErr + } + id = toLowerASCII(id) + if id == "odd" { + return 2, 1, nil + } + if id == "even" { + return 2, 0, nil + } + return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id) + default: + goto invalid + } + +positiveA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + a, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + goto readA + case 'n', 'N': + a = 1 + p.i++ + goto readN + default: + goto invalid + } + +negativeA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + a, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + a = -a + goto readA + case 'n', 'N': + a = -1 + p.i++ + goto readN + default: + goto invalid + } + +readA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case 'n', 'N': + p.i++ + goto readN + default: + // The number we read as a is actually b. + return 0, a, nil + } + +readN: + p.skipWhitespace() + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '+': + p.i++ + p.skipWhitespace() + b, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + return a, b, nil + case '-': + p.i++ + p.skipWhitespace() + b, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + return a, -b, nil + default: + return a, 0, nil + } + +eof: + return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b") + +invalid: + return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b") +} + +// parseSimpleSelectorSequence parses a selector sequence that applies to +// a single element. +func (p *parser) parseSimpleSelectorSequence() (Selector, error) { + var result Selector + + if p.i >= len(p.s) { + return nil, errors.New("expected selector, found EOF instead") + } + + switch p.s[p.i] { + case '*': + // It's the universal selector. Just skip over it, since it doesn't affect the meaning. + p.i++ + case '#', '.', '[', ':': + // There's no type selector. Wait to process the other till the main loop. 
+ default: + r, err := p.parseTypeSelector() + if err != nil { + return nil, err + } + result = r + } + +loop: + for p.i < len(p.s) { + var ns Selector + var err error + switch p.s[p.i] { + case '#': + ns, err = p.parseIDSelector() + case '.': + ns, err = p.parseClassSelector() + case '[': + ns, err = p.parseAttributeSelector() + case ':': + ns, err = p.parsePseudoclassSelector() + default: + break loop + } + if err != nil { + return nil, err + } + if result == nil { + result = ns + } else { + result = intersectionSelector(result, ns) + } + } + + if result == nil { + result = func(n *html.Node) bool { + return n.Type == html.ElementNode + } + } + + return result, nil +} + +// parseSelector parses a selector that may include combinators. +func (p *parser) parseSelector() (result Selector, err error) { + p.skipWhitespace() + result, err = p.parseSimpleSelectorSequence() + if err != nil { + return + } + + for { + var combinator byte + if p.skipWhitespace() { + combinator = ' ' + } + if p.i >= len(p.s) { + return + } + + switch p.s[p.i] { + case '+', '>', '~': + combinator = p.s[p.i] + p.i++ + p.skipWhitespace() + case ',', ')': + // These characters can't begin a selector, but they can legally occur after one. + return + } + + if combinator == 0 { + return + } + + c, err := p.parseSimpleSelectorSequence() + if err != nil { + return nil, err + } + + switch combinator { + case ' ': + result = descendantSelector(result, c) + case '>': + result = childSelector(result, c) + case '+': + result = siblingSelector(result, c, true) + case '~': + result = siblingSelector(result, c, false) + } + } + + panic("unreachable") +} + +// parseSelectorGroup parses a group of selectors, separated by commas. +func (p *parser) parseSelectorGroup() (result Selector, err error) { + result, err = p.parseSelector() + if err != nil { + return + } + + for p.i < len(p.s) { + if p.s[p.i] != ',' { + return result, nil + } + p.i++ + c, err := p.parseSelector() + if err != nil { + return nil, err + } + result = unionSelector(result, c) + } + + return +} diff --git a/vendor/github.com/andybalholm/cascadia/parser_test.go b/vendor/github.com/andybalholm/cascadia/parser_test.go new file mode 100644 index 00000000..47dd4a69 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/parser_test.go @@ -0,0 +1,86 @@ +package cascadia + +import ( + "testing" +) + +var identifierTests = map[string]string{ + "x": "x", + "96": "", + "-x": "-x", + `r\e9 sumé`: "résumé", + `a\"b`: `a"b`, +} + +func TestParseIdentifier(t *testing.T) { + for source, want := range identifierTests { + p := &parser{s: source} + got, err := p.parseIdentifier() + + if err != nil { + if want == "" { + // It was supposed to be an error. + continue + } + t.Errorf("parsing %q: got error (%s), want %q", source, err, want) + continue + } + + if want == "" { + if err == nil { + t.Errorf("parsing %q: got %q, want error", source, got) + } + continue + } + + if p.i < len(source) { + t.Errorf("parsing %q: %d bytes left over", source, len(source)-p.i) + continue + } + + if got != want { + t.Errorf("parsing %q: got %q, want %q", source, got, want) + } + } +} + +var stringTests = map[string]string{ + `"x"`: "x", + `'x'`: "x", + `'x`: "", + "'x\\\r\nx'": "xx", + `"r\e9 sumé"`: "résumé", + `"a\"b"`: `a"b`, +} + +func TestParseString(t *testing.T) { + for source, want := range stringTests { + p := &parser{s: source} + got, err := p.parseString() + + if err != nil { + if want == "" { + // It was supposed to be an error. 
+ continue + } + t.Errorf("parsing %q: got error (%s), want %q", source, err, want) + continue + } + + if want == "" { + if err == nil { + t.Errorf("parsing %q: got %q, want error", source, got) + } + continue + } + + if p.i < len(source) { + t.Errorf("parsing %q: %d bytes left over", source, len(source)-p.i) + continue + } + + if got != want { + t.Errorf("parsing %q: got %q, want %q", source, got, want) + } + } +} diff --git a/vendor/github.com/andybalholm/cascadia/selector.go b/vendor/github.com/andybalholm/cascadia/selector.go new file mode 100644 index 00000000..9fb05ccb --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/selector.go @@ -0,0 +1,622 @@ +package cascadia + +import ( + "bytes" + "fmt" + "regexp" + "strings" + + "golang.org/x/net/html" +) + +// the Selector type, and functions for creating them + +// A Selector is a function which tells whether a node matches or not. +type Selector func(*html.Node) bool + +// hasChildMatch returns whether n has any child that matches a. +func hasChildMatch(n *html.Node, a Selector) bool { + for c := n.FirstChild; c != nil; c = c.NextSibling { + if a(c) { + return true + } + } + return false +} + +// hasDescendantMatch performs a depth-first search of n's descendants, +// testing whether any of them match a. It returns true as soon as a match is +// found, or false if no match is found. +func hasDescendantMatch(n *html.Node, a Selector) bool { + for c := n.FirstChild; c != nil; c = c.NextSibling { + if a(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) { + return true + } + } + return false +} + +// Compile parses a selector and returns, if successful, a Selector object +// that can be used to match against html.Node objects. +func Compile(sel string) (Selector, error) { + p := &parser{s: sel} + compiled, err := p.parseSelectorGroup() + if err != nil { + return nil, err + } + + if p.i < len(sel) { + return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i) + } + + return compiled, nil +} + +// MustCompile is like Compile, but panics instead of returning an error. +func MustCompile(sel string) Selector { + compiled, err := Compile(sel) + if err != nil { + panic(err) + } + return compiled +} + +// MatchAll returns a slice of the nodes that match the selector, +// from n and its children. +func (s Selector) MatchAll(n *html.Node) []*html.Node { + return s.matchAllInto(n, nil) +} + +func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node { + if s(n) { + storage = append(storage, n) + } + + for child := n.FirstChild; child != nil; child = child.NextSibling { + storage = s.matchAllInto(child, storage) + } + + return storage +} + +// Match returns true if the node matches the selector. +func (s Selector) Match(n *html.Node) bool { + return s(n) +} + +// MatchFirst returns the first node that matches s, from n and its children. +func (s Selector) MatchFirst(n *html.Node) *html.Node { + if s.Match(n) { + return n + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + m := s.MatchFirst(c) + if m != nil { + return m + } + } + return nil +} + +// Filter returns the nodes in nodes that match the selector. +func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) { + for _, n := range nodes { + if s(n) { + result = append(result, n) + } + } + return result +} + +// typeSelector returns a Selector that matches elements with a given tag name. 
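+//
+// For example, typeSelector("DIV") would lowercase the tag and match every
+// element node whose Data is "div"; it is the selector produced for the
+// plain tag portion of a compound selector such as `div.teST`.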
+func typeSelector(tag string) Selector { + tag = toLowerASCII(tag) + return func(n *html.Node) bool { + return n.Type == html.ElementNode && n.Data == tag + } +} + +// toLowerASCII returns s with all ASCII capital letters lowercased. +func toLowerASCII(s string) string { + var b []byte + for i := 0; i < len(s); i++ { + if c := s[i]; 'A' <= c && c <= 'Z' { + if b == nil { + b = make([]byte, len(s)) + copy(b, s) + } + b[i] = s[i] + ('a' - 'A') + } + } + + if b == nil { + return s + } + + return string(b) +} + +// attributeSelector returns a Selector that matches elements +// where the attribute named key satisifes the function f. +func attributeSelector(key string, f func(string) bool) Selector { + key = toLowerASCII(key) + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + for _, a := range n.Attr { + if a.Key == key && f(a.Val) { + return true + } + } + return false + } +} + +// attributeExistsSelector returns a Selector that matches elements that have +// an attribute named key. +func attributeExistsSelector(key string) Selector { + return attributeSelector(key, func(string) bool { return true }) +} + +// attributeEqualsSelector returns a Selector that matches elements where +// the attribute named key has the value val. +func attributeEqualsSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + return s == val + }) +} + +// attributeNotEqualSelector returns a Selector that matches elements where +// the attribute named key does not have the value val. +func attributeNotEqualSelector(key, val string) Selector { + key = toLowerASCII(key) + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + for _, a := range n.Attr { + if a.Key == key && a.Val == val { + return false + } + } + return true + } +} + +// attributeIncludesSelector returns a Selector that matches elements where +// the attribute named key is a whitespace-separated list that includes val. +func attributeIncludesSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + for s != "" { + i := strings.IndexAny(s, " \t\r\n\f") + if i == -1 { + return s == val + } + if s[:i] == val { + return true + } + s = s[i+1:] + } + return false + }) +} + +// attributeDashmatchSelector returns a Selector that matches elements where +// the attribute named key equals val or starts with val plus a hyphen. +func attributeDashmatchSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + if s == val { + return true + } + if len(s) <= len(val) { + return false + } + if s[:len(val)] == val && s[len(val)] == '-' { + return true + } + return false + }) +} + +// attributePrefixSelector returns a Selector that matches elements where +// the attribute named key starts with val. +func attributePrefixSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + return strings.HasPrefix(s, val) + }) +} + +// attributeSuffixSelector returns a Selector that matches elements where +// the attribute named key ends with val. +func attributeSuffixSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + return strings.HasSuffix(s, val) + }) +} + +// attributeSubstringSelector returns a Selector that matches nodes where +// the attribute named key contains val. 
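+//
+// For example, `[title*="bar"]` would compile to
+// attributeSubstringSelector("title", "bar"), matching an element such as
+// <p title="foobarbaz"> but not one whose title is empty or all whitespace.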
+func attributeSubstringSelector(key, val string) Selector { + return attributeSelector(key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + return strings.Contains(s, val) + }) +} + +// attributeRegexSelector returns a Selector that matches nodes where +// the attribute named key matches the regular expression rx +func attributeRegexSelector(key string, rx *regexp.Regexp) Selector { + return attributeSelector(key, + func(s string) bool { + return rx.MatchString(s) + }) +} + +// intersectionSelector returns a selector that matches nodes that match +// both a and b. +func intersectionSelector(a, b Selector) Selector { + return func(n *html.Node) bool { + return a(n) && b(n) + } +} + +// unionSelector returns a selector that matches elements that match +// either a or b. +func unionSelector(a, b Selector) Selector { + return func(n *html.Node) bool { + return a(n) || b(n) + } +} + +// negatedSelector returns a selector that matches elements that do not match a. +func negatedSelector(a Selector) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + return !a(n) + } +} + +// writeNodeText writes the text contained in n and its descendants to b. +func writeNodeText(n *html.Node, b *bytes.Buffer) { + switch n.Type { + case html.TextNode: + b.WriteString(n.Data) + case html.ElementNode: + for c := n.FirstChild; c != nil; c = c.NextSibling { + writeNodeText(c, b) + } + } +} + +// nodeText returns the text contained in n and its descendants. +func nodeText(n *html.Node) string { + var b bytes.Buffer + writeNodeText(n, &b) + return b.String() +} + +// nodeOwnText returns the contents of the text nodes that are direct +// children of n. +func nodeOwnText(n *html.Node) string { + var b bytes.Buffer + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.TextNode { + b.WriteString(c.Data) + } + } + return b.String() +} + +// textSubstrSelector returns a selector that matches nodes that +// contain the given text. +func textSubstrSelector(val string) Selector { + return func(n *html.Node) bool { + text := strings.ToLower(nodeText(n)) + return strings.Contains(text, val) + } +} + +// ownTextSubstrSelector returns a selector that matches nodes that +// directly contain the given text +func ownTextSubstrSelector(val string) Selector { + return func(n *html.Node) bool { + text := strings.ToLower(nodeOwnText(n)) + return strings.Contains(text, val) + } +} + +// textRegexSelector returns a selector that matches nodes whose text matches +// the specified regular expression +func textRegexSelector(rx *regexp.Regexp) Selector { + return func(n *html.Node) bool { + return rx.MatchString(nodeText(n)) + } +} + +// ownTextRegexSelector returns a selector that matches nodes whose text +// directly matches the specified regular expression +func ownTextRegexSelector(rx *regexp.Regexp) Selector { + return func(n *html.Node) bool { + return rx.MatchString(nodeOwnText(n)) + } +} + +// hasChildSelector returns a selector that matches elements +// with a child that matches a. +func hasChildSelector(a Selector) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + return hasChildMatch(n, a) + } +} + +// hasDescendantSelector returns a selector that matches elements +// with any descendant that matches a. 
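+//
+// For example, `div:has(#p1)` would combine typeSelector("div") with
+// hasDescendantSelector(attributeEqualsSelector("id", "p1")), matching a
+// div that contains an element with id "p1" at any depth.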
+func hasDescendantSelector(a Selector) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + return hasDescendantMatch(n, a) + } +} + +// nthChildSelector returns a selector that implements :nth-child(an+b). +// If last is true, implements :nth-last-child instead. +// If ofType is true, implements :nth-of-type instead. +func nthChildSelector(a, b int, last, ofType bool) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + if parent.Type == html.DocumentNode { + return false + } + + i := -1 + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + i = count + if !last { + break + } + } + } + + if i == -1 { + // This shouldn't happen, since n should always be one of its parent's children. + return false + } + + if last { + i = count - i + 1 + } + + i -= b + if a == 0 { + return i == 0 + } + + return i%a == 0 && i/a >= 0 + } +} + +// simpleNthChildSelector returns a selector that implements :nth-child(b). +// If ofType is true, implements :nth-of-type instead. +func simpleNthChildSelector(b int, ofType bool) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + if parent.Type == html.DocumentNode { + return false + } + + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if c.Type != html.ElementNode || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + return count == b + } + if count >= b { + return false + } + } + return false + } +} + +// simpleNthLastChildSelector returns a selector that implements +// :nth-last-child(b). If ofType is true, implements :nth-last-of-type +// instead. +func simpleNthLastChildSelector(b int, ofType bool) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + if parent.Type == html.DocumentNode { + return false + } + + count := 0 + for c := parent.LastChild; c != nil; c = c.PrevSibling { + if c.Type != html.ElementNode || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + return count == b + } + if count >= b { + return false + } + } + return false + } +} + +// onlyChildSelector returns a selector that implements :only-child. +// If ofType is true, it implements :only-of-type instead. +func onlyChildSelector(ofType bool) Selector { + return func(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + if parent.Type == html.DocumentNode { + return false + } + + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) { + continue + } + count++ + if count > 1 { + return false + } + } + + return count == 1 + } +} + +// inputSelector is a Selector that matches input, select, textarea and button elements. +func inputSelector(n *html.Node) bool { + return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button") +} + +// emptyElementSelector is a Selector that matches empty elements. 
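+//
+// For example, `:empty` would match <div></div> and a div containing only a
+// comment node, but not <div>text</div> or <div><span></span></div>, since
+// only element and text children disqualify a node here.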
+func emptyElementSelector(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + switch c.Type { + case html.ElementNode, html.TextNode: + return false + } + } + + return true +} + +// descendantSelector returns a Selector that matches an element if +// it matches d and has an ancestor that matches a. +func descendantSelector(a, d Selector) Selector { + return func(n *html.Node) bool { + if !d(n) { + return false + } + + for p := n.Parent; p != nil; p = p.Parent { + if a(p) { + return true + } + } + + return false + } +} + +// childSelector returns a Selector that matches an element if +// it matches d and its parent matches a. +func childSelector(a, d Selector) Selector { + return func(n *html.Node) bool { + return d(n) && n.Parent != nil && a(n.Parent) + } +} + +// siblingSelector returns a Selector that matches an element +// if it matches s2 and in is preceded by an element that matches s1. +// If adjacent is true, the sibling must be immediately before the element. +func siblingSelector(s1, s2 Selector, adjacent bool) Selector { + return func(n *html.Node) bool { + if !s2(n) { + return false + } + + if adjacent { + for n = n.PrevSibling; n != nil; n = n.PrevSibling { + if n.Type == html.TextNode || n.Type == html.CommentNode { + continue + } + return s1(n) + } + return false + } + + // Walk backwards looking for element that matches s1 + for c := n.PrevSibling; c != nil; c = c.PrevSibling { + if s1(c) { + return true + } + } + + return false + } +} + +// rootSelector implements :root +func rootSelector(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + if n.Parent == nil { + return false + } + return n.Parent.Type == html.DocumentNode +} diff --git a/vendor/github.com/andybalholm/cascadia/selector_test.go b/vendor/github.com/andybalholm/cascadia/selector_test.go new file mode 100644 index 00000000..7ff77e65 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/selector_test.go @@ -0,0 +1,654 @@ +package cascadia + +import ( + "bytes" + "strings" + "testing" + + "golang.org/x/net/html" +) + +type selectorTest struct { + HTML, selector string + results []string +} + +func nodeString(n *html.Node) string { + buf := bytes.NewBufferString("") + html.Render(buf, n) + return buf.String() +} + +var selectorTests = []selectorTest{ + { + `
This address...
`, + "address", + []string{ + "
This address...
", + }, + }, + { + `text`, + "*", + []string{ + "text", + "", + "text", + }, + }, + { + ``, + "*", + []string{ + "", + "", + "", + }, + }, + { + `

`, + "#foo", + []string{ + `

`, + }, + }, + { + `