Vendor dependencies

Zhiming Wang 2020-02-08 20:44:46 +08:00
commit f4850a6ba0
GPG Key ID: 5B58F95EC95965D8
577 changed files with 299316 additions and 0 deletions


@@ -0,0 +1,22 @@
MIT License
Copyright (c) 2018 Henry Slawniak <https://datacenterscumbags.com/>
Copyright (c) 2018 Mercury Engineering <https://mercury.engineering/>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@@ -0,0 +1,27 @@
# CookieMonster
[![GoDoc](https://godoc.org/github.com/MercuryEngineering/CookieMonster?status.svg)](https://godoc.org/github.com/MercuryEngineering/CookieMonster) [![Build Status](https://travis-ci.org/MercuryEngineering/CookieMonster.svg?branch=master)](https://travis-ci.org/MercuryEngineering/CookieMonster)
A simple package for parsing [Netscape Cookie File](http://curl.haxx.se/rfc/cookie_spec.html) formatted cookies into Go [Cookies](https://golang.org/pkg/net/http/#Cookie)
### Install
`go get -u github.com/MercuryEngineering/CookieMonster`
### Example
```go
package main

import (
	"fmt"

	cookiemonster "github.com/MercuryEngineering/CookieMonster"
)

func main() {
	cookies, err := cookiemonster.ParseFile("cookies.txt")
	if err != nil {
		panic(err)
	}

	for _, cookie := range cookies {
		fmt.Printf("%s=%s\n", cookie.Name, cookie.Value)
	}
}
```


@@ -0,0 +1,99 @@
// Copyright (c) 2018 Henry Slawniak <https://datacenterscumbags.com/>
// Copyright (c) 2018 Mercury Engineering <https://mercury.engineering/>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// Package cookiemonster provides methods for parsing Netscape format cookie files into slices of http.Cookie
package cookiemonster
import (
"bufio"
"bytes"
"io"
"net/http"
"os"
"strconv"
"strings"
"time"
)
// ParseFile parses the file located at path and will return a slice of *http.Cookie or an error
func ParseFile(path string) ([]*http.Cookie, error) {
f, err := os.Open(path)
if err != nil {
return nil, err
}
defer f.Close()
return Parse(f)
}
// ParseString parses s and will return a slice of *http.Cookie or an error
func ParseString(s string) ([]*http.Cookie, error) {
return Parse(bytes.NewReader([]byte(s)))
}
// Parse will parse r and will return a slice of *http.Cookie or an error
func Parse(r io.Reader) ([]*http.Cookie, error) {
scanner := bufio.NewScanner(r)
cookies := []*http.Cookie{}
for scanner.Scan() {
line := scanner.Text()
if strings.HasPrefix(line, "#") || line == "" {
// Ignore comments and blank lines
continue
}
split := strings.Split(line, "\t")
if len(split) < 7 {
// Ignore lines that are not long enough
continue
}
expiresSplit := strings.Split(split[4], ".")
expiresSec, err := strconv.Atoi(expiresSplit[0])
if err != nil {
return nil, err
}
expiresNsec := 0
if len(expiresSplit) > 1 {
expiresNsec, err = strconv.Atoi(expiresSplit[1])
if err != nil {
expiresNsec = 0
}
}
cookie := &http.Cookie{
Name: split[5],
Value: split[6],
Path: split[2],
Domain: split[0],
Expires: time.Unix(int64(expiresSec), int64(expiresNsec)),
Secure: strings.ToLower(split[3]) == "true",
HttpOnly: strings.ToLower(split[1]) == "true",
}
cookies = append(cookies, cookie)
}
return cookies, nil
}


@@ -0,0 +1,6 @@
# Netscape HTTP Cookie File
# http://curl.haxx.se/rfc/cookie_spec.html
# This file was generated by EditThisCookie
# URL: https://mercury.engineering/
mercury.engineering TRUE / FALSE 0 beep boop
mercury.engineering TRUE / FALSE 0 test test_value
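
The data lines above carry seven tab-separated fields (the tabs are flattened to spaces in this view). A minimal sketch, assuming the vendored import path from the README, of how one such line maps onto an `http.Cookie` via `ParseString`:

```go
package main

import (
	"fmt"

	cookiemonster "github.com/MercuryEngineering/CookieMonster"
)

func main() {
	// Field order, per the Parse function above: domain, HttpOnly flag, path,
	// Secure flag, expiry (unix seconds), name, value, all tab-separated.
	line := "mercury.engineering\tTRUE\t/\tFALSE\t0\tbeep\tboop"

	cookies, err := cookiemonster.ParseString(line)
	if err != nil {
		panic(err)
	}
	c := cookies[0]
	fmt.Printf("domain=%s path=%s name=%s value=%s\n", c.Domain, c.Path, c.Name, c.Value)
}
```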

vendor/github.com/PuerkitoBio/goquery/LICENSE generated vendored Normal file

@@ -0,0 +1,12 @@
Copyright (c) 2012-2016, Martin Angers & Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

vendor/github.com/PuerkitoBio/goquery/README.md generated vendored Normal file

@@ -0,0 +1,183 @@
# goquery - a little like that j-thing, only in Go
[![build status](https://secure.travis-ci.org/PuerkitoBio/goquery.svg?branch=master)](http://travis-ci.org/PuerkitoBio/goquery) [![GoDoc](https://godoc.org/github.com/PuerkitoBio/goquery?status.png)](http://godoc.org/github.com/PuerkitoBio/goquery) [![Sourcegraph Badge](https://sourcegraph.com/github.com/PuerkitoBio/goquery/-/badge.svg)](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)
goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.
Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt it was better for a similar HTML-manipulating library to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).
## Table of Contents
* [Installation](#installation)
* [Changelog](#changelog)
* [API](#api)
* [Examples](#examples)
* [Related Projects](#related-projects)
* [Support](#support)
* [License](#license)
## Installation
Please note that because of the net/html dependency, goquery requires Go1.1+.
$ go get github.com/PuerkitoBio/goquery
(optional) To run unit tests:
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
$ go test
(optional) To run benchmarks (warning: it runs for a few minutes):
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
$ go test -bench=".*"
## Changelog
**Note that goquery's API is now stable, and will not break.**
* **2020-02-04 (v1.5.1)** : Update module dependencies.
* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
* **v0.1.0** : Initial release.
## API
goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.
jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:
* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)
Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).
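A minimal sketch (illustrative only, not part of the upstream README) of what these naming variants look like side by side:
```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<ul><li class="a">one</li><li class="b">two</li></ul>`))
	if err != nil {
		log.Fatal(err)
	}
	li := doc.Find("li.b")
	fmt.Println(li.Prev().Text())             // no-argument form
	fmt.Println(li.PrevFiltered(".a").Text()) // selector-string form: XxxFiltered()
	// function form: XxxFunction()
	first := doc.Find("li").FilterFunction(func(i int, s *goquery.Selection) bool {
		return s.HasClass("a")
	})
	fmt.Println(first.Text())
}
```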
The complete [godoc reference documentation can be found here][doc].
Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
* `Find("~")` returns an empty selection because the selector string doesn't match anything.
* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.
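A small sketch (illustrative only) of those rules in action:
```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader("<p>hello</p>"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(doc.Find("~").Length())                   // 0: the invalid selector matches nothing
	fmt.Println(doc.Find("p").Add("~").Length())          // 1: nothing was added
	fmt.Println(doc.Find("p").ParentsUntil("~").Length()) // all parents of p (body, html)
}
```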
## Examples
See some tips and tricks in the [wiki][].
Adapted from example_test.go:
```Go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/PuerkitoBio/goquery"
)

func ExampleScrape() {
	// Request the HTML page.
	res, err := http.Get("http://metalsucks.net")
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
	}

	// Load the HTML document
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Find the review items
	doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
		// For each item found, get the band and title
		band := s.Find("a").Text()
		title := s.Find("i").Text()
		fmt.Printf("Review %d: %s - %s\n", i, band, title)
	})
}

func main() {
	ExampleScrape()
}
```
## Related Projects
- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
- [gocolly/colly](https://github.com/gocolly/colly), a lightning fast and elegant Scraping Framework
- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
- [tacusci/berrycms](https://github.com/tacusci/berrycms), a modern, simple-to-use CMS with easy-to-write plugins.
- [Dataflow kit](https://github.com/slotix/dataflowkit), Web Scraping framework for Gophers.
- [Geziyor](https://github.com/geziyor/geziyor), a fast web crawling & scraping framework for Go. Supports JS rendering.
## Support
There are a number of ways you can support the project:
* Use it, star it, build something with it, spread the word!
- If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
- Please search existing issues before opening a new one - it may have already been addressed.
* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
- Make sure new code is tested.
- Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.
If you desperately want to send money my way, I have a BuyMeACoffee.com page:
<a href="https://www.buymeacoffee.com/mna" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
## License
The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].
[jquery]: http://jquery.com/
[go]: http://golang.org/
[cascadia]: https://github.com/andybalholm/cascadia
[cascadiacli]: https://github.com/suntong/cascadia
[bsd]: http://opensource.org/licenses/BSD-3-Clause
[golic]: http://golang.org/LICENSE
[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
[doc]: http://godoc.org/github.com/PuerkitoBio/goquery
[index]: http://api.jquery.com/index/
[gonet]: https://github.com/golang/net/
[html]: http://godoc.org/golang.org/x/net/html
[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
[thatguystone]: https://github.com/thatguystone
[piotr]: https://github.com/piotrkowalczuk
[goq]: https://github.com/andrewstuart/goq

vendor/github.com/PuerkitoBio/goquery/array.go generated vendored Normal file

@@ -0,0 +1,124 @@
package goquery
import (
"golang.org/x/net/html"
)
const (
maxUint = ^uint(0)
maxInt = int(maxUint >> 1)
// ToEnd is a special index value that can be used as end index in a call
// to Slice so that all elements are selected until the end of the Selection.
// It is equivalent to passing (*Selection).Length().
ToEnd = maxInt
)
// First reduces the set of matched elements to the first in the set.
// It returns a new Selection object, and an empty Selection object if the
// selection is empty.
func (s *Selection) First() *Selection {
return s.Eq(0)
}
// Last reduces the set of matched elements to the last in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) Last() *Selection {
return s.Eq(-1)
}
// Eq reduces the set of matched elements to the one at the specified index.
// If a negative index is given, it counts backwards starting at the end of the
// set. It returns a new Selection object, and an empty Selection object if the
// index is invalid.
func (s *Selection) Eq(index int) *Selection {
if index < 0 {
index += len(s.Nodes)
}
if index >= len(s.Nodes) || index < 0 {
return newEmptySelection(s.document)
}
return s.Slice(index, index+1)
}
// Slice reduces the set of matched elements to a subset specified by a range
// of indices. The start index is 0-based and indicates the index of the first
// element to select. The end index is 0-based and indicates the index at which
// the elements stop being selected (the end index is not selected).
//
// The indices may be negative, in which case they represent an offset from the
// end of the selection.
//
// The special value ToEnd may be specified as end index, in which case all elements
// until the end are selected. This works both for a positive and negative start
// index.
func (s *Selection) Slice(start, end int) *Selection {
if start < 0 {
start += len(s.Nodes)
}
if end == ToEnd {
end = len(s.Nodes)
} else if end < 0 {
end += len(s.Nodes)
}
return pushStack(s, s.Nodes[start:end])
}
// Get retrieves the underlying node at the specified index.
// Get without parameter is not implemented, since the node array is available
// on the Selection object.
func (s *Selection) Get(index int) *html.Node {
if index < 0 {
index += len(s.Nodes) // Negative index gets from the end
}
return s.Nodes[index]
}
// Index returns the position of the first element within the Selection object
// relative to its sibling elements.
func (s *Selection) Index() int {
if len(s.Nodes) > 0 {
return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
}
return -1
}
// IndexSelector returns the position of the first element within the
// Selection object relative to the elements matched by the selector, or -1 if
// not found.
func (s *Selection) IndexSelector(selector string) int {
if len(s.Nodes) > 0 {
sel := s.document.Find(selector)
return indexInSlice(sel.Nodes, s.Nodes[0])
}
return -1
}
// IndexMatcher returns the position of the first element within the
// Selection object relative to the elements matched by the matcher, or -1 if
// not found.
func (s *Selection) IndexMatcher(m Matcher) int {
if len(s.Nodes) > 0 {
sel := s.document.FindMatcher(m)
return indexInSlice(sel.Nodes, s.Nodes[0])
}
return -1
}
// IndexOfNode returns the position of the specified node within the Selection
// object, or -1 if not found.
func (s *Selection) IndexOfNode(node *html.Node) int {
return indexInSlice(s.Nodes, node)
}
// IndexOfSelection returns the position of the first node in the specified
// Selection object within this Selection object, or -1 if not found.
func (s *Selection) IndexOfSelection(sel *Selection) int {
if sel != nil && len(sel.Nodes) > 0 {
return indexInSlice(s.Nodes, sel.Nodes[0])
}
return -1
}
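
A hedged usage sketch for the positional helpers above (negative indices and the ToEnd sentinel); the HTML snippet is illustrative only:
```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		"<p>a</p><p>b</p><p>c</p><p>d</p>"))
	if err != nil {
		log.Fatal(err)
	}
	ps := doc.Find("p")
	fmt.Println(ps.Eq(-1).Text())                     // "d": a negative index counts from the end
	fmt.Println(ps.Slice(1, 3).Length())              // 2: indices 1 and 2
	fmt.Println(ps.Slice(-2, goquery.ToEnd).Length()) // 2: the last two elements
	fmt.Println(ps.Eq(1).Index())                     // 1: position among its siblings
}
```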

vendor/github.com/PuerkitoBio/goquery/doc.go generated vendored Normal file

@@ -0,0 +1,123 @@
// Copyright (c) 2012-2016, Martin Angers & Contributors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation and/or
// other materials provided with the distribution.
// * Neither the name of the author nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
Package goquery implements features similar to jQuery, including the chainable
syntax, to manipulate and query an HTML document.
It brings a syntax and a set of features similar to jQuery to the Go language.
It is based on Go's net/html package and the CSS Selector library cascadia.
Since the net/html parser returns nodes, and not a full-featured DOM
tree, jQuery's stateful manipulation functions (like height(), css(), detach())
have been left off.
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is
the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML.
See the repository's wiki for various options on how to do this.
Syntax-wise, it is as close as possible to jQuery, with the same method names when
possible, and that warm and fuzzy chainable interface. jQuery being the
ultra-popular library that it is, it seemed better for a similar HTML-manipulating
library to follow its API than to start anew (in the same spirit as
Go's fmt package), even though some of its methods are less than intuitive (looking
at you, index()...).
It is hosted on GitHub, along with additional documentation in the README.md
file: https://github.com/puerkitobio/goquery
Please note that because of the net/html dependency, goquery requires Go1.1+.
The various methods are split into files based on the category of behavior.
The three dots (...) indicate that various "overloads" are available.
* array.go : array-like positional manipulation of the selection.
- Eq()
- First()
- Get()
- Index...()
- Last()
- Slice()
* expand.go : methods that expand or augment the selection's set.
- Add...()
- AndSelf()
- Union(), which is an alias for AddSelection()
* filter.go : filtering methods, that reduce the selection's set.
- End()
- Filter...()
- Has...()
- Intersection(), which is an alias of FilterSelection()
- Not...()
* iteration.go : methods to loop over the selection's nodes.
- Each()
- EachWithBreak()
- Map()
* manipulation.go : methods for modifying the document
- After...()
- Append...()
- Before...()
- Clone()
- Empty()
- Prepend...()
- Remove...()
- ReplaceWith...()
- Unwrap()
- Wrap...()
- WrapAll...()
- WrapInner...()
* property.go : methods that inspect and get the node's properties values.
- Attr*(), RemoveAttr(), SetAttr()
- AddClass(), HasClass(), RemoveClass(), ToggleClass()
- Html()
- Length()
- Size(), which is an alias for Length()
- Text()
* query.go : methods that query, or reflect, a node's identity.
- Contains()
- Is...()
* traversal.go : methods to traverse the HTML document tree.
- Children...()
- Contents()
- Find...()
- Next...()
- Parent[s]...()
- Prev...()
- Siblings...()
* type.go : definition of the types exposed by goquery.
- Document
- Selection
- Matcher
* utilities.go : definition of helper functions (and not methods on a *Selection)
that are not part of jQuery, but are useful to goquery.
- NodeName
- OuterHtml
*/
package goquery

vendor/github.com/PuerkitoBio/goquery/expand.go generated vendored Normal file

@@ -0,0 +1,70 @@
package goquery
import "golang.org/x/net/html"
// Add adds the selector string's matching nodes to those in the current
// selection and returns a new Selection object.
// The selector string is run in the context of the document of the current
// Selection object.
func (s *Selection) Add(selector string) *Selection {
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
}
// AddMatcher adds the matcher's matching nodes to those in the current
// selection and returns a new Selection object.
// The matcher is run in the context of the document of the current
// Selection object.
func (s *Selection) AddMatcher(m Matcher) *Selection {
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
}
// AddSelection adds the specified Selection object's nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddSelection(sel *Selection) *Selection {
if sel == nil {
return s.AddNodes()
}
return s.AddNodes(sel.Nodes...)
}
// Union is an alias for AddSelection.
func (s *Selection) Union(sel *Selection) *Selection {
return s.AddSelection(sel)
}
// AddNodes adds the specified nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
}
// AndSelf adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
// Deprecated: This function has been deprecated and is now an alias for AddBack().
func (s *Selection) AndSelf() *Selection {
return s.AddBack()
}
// AddBack adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
func (s *Selection) AddBack() *Selection {
return s.AddSelection(s.prevSel)
}
// AddBackFiltered reduces the previous set of elements on the stack to those that
// match the selector string, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackFiltered(selector string) *Selection {
return s.AddSelection(s.prevSel.Filter(selector))
}
// AddBackMatcher reduces the previous set of elements on the stack to those that match
// the matcher, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackMatcher(m Matcher) *Selection {
return s.AddSelection(s.prevSel.FilterMatcher(m))
}
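
A brief sketch (not part of the vendored file) of how Add and AddBack grow a selection:
```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<ul><li class="a">1</li><li class="b">2</li><li class="c">3</li></ul>`))
	if err != nil {
		log.Fatal(err)
	}
	union := doc.Find("li.a").Add("li.b") // union of two selector results
	fmt.Println(union.Length())           // 2
	back := doc.Find("li").Filter(".a").AddBack() // filtered set plus the previous set
	fmt.Println(back.Length())                    // 3
}
```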

vendor/github.com/PuerkitoBio/goquery/filter.go generated vendored Normal file

@@ -0,0 +1,163 @@
package goquery
import "golang.org/x/net/html"
// Filter reduces the set of matched elements to those that match the selector string.
// It returns a new Selection object for this subset of matching elements.
func (s *Selection) Filter(selector string) *Selection {
return s.FilterMatcher(compileMatcher(selector))
}
// FilterMatcher reduces the set of matched elements to those that match
// the given matcher. It returns a new Selection object for this subset
// of matching elements.
func (s *Selection) FilterMatcher(m Matcher) *Selection {
return pushStack(s, winnow(s, m, true))
}
// Not removes elements from the Selection that match the selector string.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) Not(selector string) *Selection {
return s.NotMatcher(compileMatcher(selector))
}
// NotMatcher removes elements from the Selection that match the given matcher.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotMatcher(m Matcher) *Selection {
return pushStack(s, winnow(s, m, false))
}
// FilterFunction reduces the set of matched elements to those that pass the function's test.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
return pushStack(s, winnowFunction(s, f, true))
}
// NotFunction removes elements from the Selection that pass the function's test.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
return pushStack(s, winnowFunction(s, f, false))
}
// FilterNodes reduces the set of matched elements to those that match the specified nodes.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
return pushStack(s, winnowNodes(s, nodes, true))
}
// NotNodes removes elements from the Selection that match the specified nodes.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
return pushStack(s, winnowNodes(s, nodes, false))
}
// FilterSelection reduces the set of matched elements to those that match a
// node in the specified Selection object.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, winnowNodes(s, nil, true))
}
return pushStack(s, winnowNodes(s, sel.Nodes, true))
}
// NotSelection removes elements from the Selection that match a node in the specified
// Selection object. It returns a new Selection object with the matching elements removed.
func (s *Selection) NotSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, winnowNodes(s, nil, false))
}
return pushStack(s, winnowNodes(s, sel.Nodes, false))
}
// Intersection is an alias for FilterSelection.
func (s *Selection) Intersection(sel *Selection) *Selection {
return s.FilterSelection(sel)
}
// Has reduces the set of matched elements to those that have a descendant
// that matches the selector.
// It returns a new Selection object with the matching elements.
func (s *Selection) Has(selector string) *Selection {
return s.HasSelection(s.document.Find(selector))
}
// HasMatcher reduces the set of matched elements to those that have a descendant
// that matches the matcher.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasMatcher(m Matcher) *Selection {
return s.HasSelection(s.document.FindMatcher(m))
}
// HasNodes reduces the set of matched elements to those that have a
// descendant that matches one of the nodes.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
return s.FilterFunction(func(_ int, sel *Selection) bool {
// Add all nodes that contain one of the specified nodes
for _, n := range nodes {
if sel.Contains(n) {
return true
}
}
return false
})
}
// HasSelection reduces the set of matched elements to those that have a
// descendant that matches one of the nodes of the specified Selection object.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasSelection(sel *Selection) *Selection {
if sel == nil {
return s.HasNodes()
}
return s.HasNodes(sel.Nodes...)
}
// End ends the most recent filtering operation in the current chain and
// returns the set of matched elements to its previous state.
func (s *Selection) End() *Selection {
if s.prevSel != nil {
return s.prevSel
}
return newEmptySelection(s.document)
}
// Filter based on the matcher, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
// Optimize if keep is requested
if keep {
return m.Filter(sel.Nodes)
}
// Use grep
return grep(sel, func(i int, s *Selection) bool {
return !m.Match(s.Get(0))
})
}
// Filter based on an array of nodes, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
if len(nodes)+len(sel.Nodes) < minNodesForSet {
return grep(sel, func(i int, s *Selection) bool {
return isInSlice(nodes, s.Get(0)) == keep
})
}
set := make(map[*html.Node]bool)
for _, n := range nodes {
set[n] = true
}
return grep(sel, func(i int, s *Selection) bool {
return set[s.Get(0)] == keep
})
}
// Filter based on a function test, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
return grep(sel, func(i int, s *Selection) bool {
return f(i, s) == keep
})
}
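
A short sketch (illustrative only) of the filtering methods and End():
```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div><p class="keep">a</p><p>b</p><p class="keep">c</p></div>`))
	if err != nil {
		log.Fatal(err)
	}
	ps := doc.Find("p")
	kept := ps.Filter(".keep")
	fmt.Println(kept.Length())                          // 2: only the matching elements remain
	fmt.Println(ps.Not(".keep").Length())               // 1: the matching elements are removed
	fmt.Println(doc.Find("div").Has("p.keep").Length()) // 1: the div has a matching descendant
	fmt.Println(kept.End().Length())                    // 3: back to the pre-filter selection
}
```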

vendor/github.com/PuerkitoBio/goquery/go.mod generated vendored Normal file

@@ -0,0 +1,8 @@
module github.com/PuerkitoBio/goquery
require (
github.com/andybalholm/cascadia v1.1.0
golang.org/x/net v0.0.0-20200202094626-16171245cfb2
)
go 1.13

vendor/github.com/PuerkitoBio/goquery/go.sum generated vendored Normal file

@@ -0,0 +1,8 @@
github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo=
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

vendor/github.com/PuerkitoBio/goquery/iteration.go generated vendored Normal file

@@ -0,0 +1,39 @@
package goquery
// Each iterates over a Selection object, executing a function for each
// matched element. It returns the current Selection object. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Each(f func(int, *Selection)) *Selection {
for i, n := range s.Nodes {
f(i, newSingleSelection(n, s.document))
}
return s
}
// EachWithBreak iterates over a Selection object, executing a function for each
// matched element. It is identical to Each except that it is possible to break
// out of the loop by returning false in the callback function. It returns the
// current Selection object.
func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
for i, n := range s.Nodes {
if !f(i, newSingleSelection(n, s.document)) {
return s
}
}
return s
}
// Map passes each element in the current matched set through a function,
// producing a slice of string holding the returned values. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
for i, n := range s.Nodes {
result = append(result, f(i, newSingleSelection(n, s.document)))
}
return result
}
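
An illustrative sketch (not part of the vendored file) of Each, EachWithBreak and Map:
```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		"<ul><li>a</li><li>b</li><li>c</li></ul>"))
	if err != nil {
		log.Fatal(err)
	}
	items := doc.Find("li")
	items.Each(func(i int, s *goquery.Selection) {
		fmt.Printf("item %d: %s\n", i, s.Text())
	})
	items.EachWithBreak(func(i int, s *goquery.Selection) bool {
		return s.Text() != "b" // returning false stops the loop at "b"
	})
	texts := items.Map(func(i int, s *goquery.Selection) string {
		return strings.ToUpper(s.Text())
	})
	fmt.Println(texts) // [A B C]
}
```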

vendor/github.com/PuerkitoBio/goquery/manipulation.go generated vendored Normal file

@@ -0,0 +1,574 @@
package goquery
import (
"strings"
"golang.org/x/net/html"
)
// After applies the selector from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) After(selector string) *Selection {
return s.AfterMatcher(compileMatcher(selector))
}
// AfterMatcher applies the matcher from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterMatcher(m Matcher) *Selection {
return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
}
// AfterSelection inserts the elements in the selection after each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterSelection(sel *Selection) *Selection {
return s.AfterNodes(sel.Nodes...)
}
// AfterHtml parses the html and inserts it after the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterHtml(html string) *Selection {
return s.AfterNodes(parseHtml(html)...)
}
// AfterNodes inserts the nodes after each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
if sn.Parent != nil {
sn.Parent.InsertBefore(n, sn.NextSibling)
}
})
}
// Append appends the elements specified by the selector to the end of each element
// in the set of matched elements, following those rules:
//
// 1) The selector is applied to the root document.
//
// 2) Elements that are part of the document will be moved to the new location.
//
// 3) If there are multiple locations to append to, cloned nodes will be
// appended to all target locations except the last one, which will be moved
// as noted in (2).
func (s *Selection) Append(selector string) *Selection {
return s.AppendMatcher(compileMatcher(selector))
}
// AppendMatcher appends the elements specified by the matcher to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendMatcher(m Matcher) *Selection {
return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
}
// AppendSelection appends the elements in the selection to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendSelection(sel *Selection) *Selection {
return s.AppendNodes(sel.Nodes...)
}
// AppendHtml parses the html and appends it to the set of matched elements.
func (s *Selection) AppendHtml(html string) *Selection {
return s.AppendNodes(parseHtml(html)...)
}
// AppendNodes appends the specified nodes to each node in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
sn.AppendChild(n)
})
}
// Before inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) Before(selector string) *Selection {
return s.BeforeMatcher(compileMatcher(selector))
}
// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeMatcher(m Matcher) *Selection {
return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
}
// BeforeSelection inserts the elements in the selection before each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeSelection(sel *Selection) *Selection {
return s.BeforeNodes(sel.Nodes...)
}
// BeforeHtml parses the html and inserts it before the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeHtml(html string) *Selection {
return s.BeforeNodes(parseHtml(html)...)
}
// BeforeNodes inserts the nodes before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
if sn.Parent != nil {
sn.Parent.InsertBefore(n, sn)
}
})
}
// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
// attached to the document.
func (s *Selection) Clone() *Selection {
ns := newEmptySelection(s.document)
ns.Nodes = cloneNodes(s.Nodes)
return ns
}
// Empty removes all children nodes from the set of matched elements.
// It returns the children nodes in a new Selection.
func (s *Selection) Empty() *Selection {
var nodes []*html.Node
for _, n := range s.Nodes {
for c := n.FirstChild; c != nil; c = n.FirstChild {
n.RemoveChild(c)
nodes = append(nodes, c)
}
}
return pushStack(s, nodes)
}
// Prepend prepends the elements specified by the selector to each element in
// the set of matched elements, following the same rules as Append.
func (s *Selection) Prepend(selector string) *Selection {
return s.PrependMatcher(compileMatcher(selector))
}
// PrependMatcher prepends the elements specified by the matcher to each
// element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependMatcher(m Matcher) *Selection {
return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
}
// PrependSelection prepends the elements in the selection to each element in
// the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependSelection(sel *Selection) *Selection {
return s.PrependNodes(sel.Nodes...)
}
// PrependHtml parses the html and prepends it to the set of matched elements.
func (s *Selection) PrependHtml(html string) *Selection {
return s.PrependNodes(parseHtml(html)...)
}
// PrependNodes prepends the specified nodes to each node in the set of
// matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
// sn.FirstChild may be nil, in which case this functions like
// sn.AppendChild()
sn.InsertBefore(n, sn.FirstChild)
})
}
// Remove removes the set of matched elements from the document.
// It returns the same selection, now consisting of nodes not in the document.
func (s *Selection) Remove() *Selection {
for _, n := range s.Nodes {
if n.Parent != nil {
n.Parent.RemoveChild(n)
}
}
return s
}
// RemoveFiltered removes the set of matched elements by selector.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveFiltered(selector string) *Selection {
return s.RemoveMatcher(compileMatcher(selector))
}
// RemoveMatcher removes the set of matched elements.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
return s.FilterMatcher(m).Remove()
}
// ReplaceWith replaces each element in the set of matched elements with the
// nodes matched by the given selector.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWith(selector string) *Selection {
return s.ReplaceWithMatcher(compileMatcher(selector))
}
// ReplaceWithMatcher replaces each element in the set of matched elements with
// the nodes matched by the given Matcher.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
}
// ReplaceWithSelection replaces each element in the set of matched elements with
// the nodes from the given Selection.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
return s.ReplaceWithNodes(sel.Nodes...)
}
// ReplaceWithHtml replaces each element in the set of matched elements with
// the parsed HTML.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithHtml(html string) *Selection {
return s.ReplaceWithNodes(parseHtml(html)...)
}
// ReplaceWithNodes replaces each element in the set of matched elements with
// the given nodes.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
s.AfterNodes(ns...)
return s.Remove()
}
// SetHtml sets the html content of each element in the selection to
// specified html string.
func (s *Selection) SetHtml(html string) *Selection {
return setHtmlNodes(s, parseHtml(html)...)
}
// SetText sets the content of each element in the selection to specified content.
// The provided text string is escaped.
func (s *Selection) SetText(text string) *Selection {
return s.SetHtml(html.EscapeString(text))
}
// Unwrap removes the parents of the set of matched elements, leaving the matched
// elements (and their siblings, if any) in their place.
// It returns the original selection.
func (s *Selection) Unwrap() *Selection {
s.Parent().Each(func(i int, ss *Selection) {
// For some reason, jquery allows unwrap to remove the <head> element, so
// allowing it here too. Same for <html>. Why it allows those elements to
// be unwrapped while not allowing body is a mystery to me.
if ss.Nodes[0].Data != "body" {
ss.ReplaceWithSelection(ss.Contents())
}
})
return s
}
// Wrap wraps each element in the set of matched elements inside the first
// element matched by the given selector. The matched child is cloned before
// being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) Wrap(selector string) *Selection {
return s.WrapMatcher(compileMatcher(selector))
}
// WrapMatcher wraps each element in the set of matched elements inside the
// first element matched by the given matcher. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapMatcher(m Matcher) *Selection {
return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapSelection wraps each element in the set of matched elements inside the
// first element in the given Selection. The element is cloned before being
// inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapSelection(sel *Selection) *Selection {
return s.wrapNodes(sel.Nodes...)
}
// WrapHtml wraps each element in the set of matched elements inside the inner-
// most child of the given HTML.
//
// It returns the original set of elements.
func (s *Selection) WrapHtml(html string) *Selection {
return s.wrapNodes(parseHtml(html)...)
}
// WrapNode wraps each element in the set of matched elements inside the inner-
// most child of the given node. The given node is copied before being inserted
// into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapNode(n *html.Node) *Selection {
return s.wrapNodes(n)
}
func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
s.Each(func(i int, ss *Selection) {
ss.wrapAllNodes(ns...)
})
return s
}
// WrapAll wraps a single HTML structure, matched by the given selector, around
// all elements in the set of matched elements. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAll(selector string) *Selection {
return s.WrapAllMatcher(compileMatcher(selector))
}
// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
// around all elements in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapAllSelection wraps a single HTML structure, the first node of the given
// Selection, around all elements in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
return s.wrapAllNodes(sel.Nodes...)
}
// WrapAllHtml wraps the given HTML structure around all elements in the set of
// matched elements. The matched child is cloned before being inserted into the
// document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllHtml(html string) *Selection {
return s.wrapAllNodes(parseHtml(html)...)
}
func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
if len(ns) > 0 {
return s.WrapAllNode(ns[0])
}
return s
}
// WrapAllNode wraps the given node around the first element in the Selection,
// making all other nodes in the Selection children of the given node. The node
// is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllNode(n *html.Node) *Selection {
if s.Size() == 0 {
return s
}
wrap := cloneNode(n)
first := s.Nodes[0]
if first.Parent != nil {
first.Parent.InsertBefore(wrap, first)
first.Parent.RemoveChild(first)
}
for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
wrap = c
}
newSingleSelection(wrap, s.document).AppendSelection(s)
return s
}
// WrapInner wraps an HTML structure, matched by the given selector, around the
// content of each element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInner(selector string) *Selection {
return s.WrapInnerMatcher(compileMatcher(selector))
}
// WrapInnerMatcher wraps an HTML structure, matched by the given matcher,
// around the content of each element in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapInnerSelection wraps an HTML structure, the first node of the given
// Selection, around the content of each element in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
return s.wrapInnerNodes(sel.Nodes...)
}
// WrapInnerHtml wraps an HTML structure, parsed from the given HTML string, around
// the content of each element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerHtml(html string) *Selection {
return s.wrapInnerNodes(parseHtml(html)...)
}
// WrapInnerNode wraps the given node around the content of each element in the
// set of matched elements. The given node is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
return s.wrapInnerNodes(n)
}
func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
if len(ns) == 0 {
return s
}
s.Each(func(i int, s *Selection) {
contents := s.Contents()
if contents.Size() > 0 {
contents.wrapAllNodes(ns...)
} else {
s.AppendNodes(cloneNode(ns[0]))
}
})
return s
}
func parseHtml(h string) []*html.Node {
// Errors are only returned when the io.Reader returns any error besides
// EOF, but strings.Reader never will
nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
if err != nil {
panic("goquery: failed to parse HTML: " + err.Error())
}
return nodes
}
func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
for _, n := range s.Nodes {
for c := n.FirstChild; c != nil; c = n.FirstChild {
n.RemoveChild(c)
}
for _, c := range ns {
n.AppendChild(cloneNode(c))
}
}
return s
}
// Get the first child that is an ElementNode
func getFirstChildEl(n *html.Node) *html.Node {
c := n.FirstChild
for c != nil && c.Type != html.ElementNode {
c = c.NextSibling
}
return c
}
// Deep copy a slice of nodes.
func cloneNodes(ns []*html.Node) []*html.Node {
cns := make([]*html.Node, 0, len(ns))
for _, n := range ns {
cns = append(cns, cloneNode(n))
}
return cns
}
// Deep copy a node. The new node has clones of all the original node's
// children but none of its parents or siblings.
func cloneNode(n *html.Node) *html.Node {
nn := &html.Node{
Type: n.Type,
DataAtom: n.DataAtom,
Data: n.Data,
Attr: make([]html.Attribute, len(n.Attr)),
}
copy(nn.Attr, n.Attr)
for c := n.FirstChild; c != nil; c = c.NextSibling {
nn.AppendChild(cloneNode(c))
}
return nn
}
func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
f func(sn *html.Node, n *html.Node)) *Selection {
lasti := s.Size() - 1
// net/html doesn't provide document fragments for insertion, so to get
// things in the correct order with After() and Prepend(), the callback
// needs to be called on the reverse of the nodes.
if reverse {
for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
ns[i], ns[j] = ns[j], ns[i]
}
}
for i, sn := range s.Nodes {
for _, n := range ns {
if i != lasti {
f(sn, cloneNode(n))
} else {
if n.Parent != nil {
n.Parent.RemoveChild(n)
}
f(sn, n)
}
}
}
return s
}

275
vendor/github.com/PuerkitoBio/goquery/property.go generated vendored Normal file

@ -0,0 +1,275 @@
package goquery
import (
"bytes"
"regexp"
"strings"
"golang.org/x/net/html"
)
var rxClassTrim = regexp.MustCompile("[\t\r\n]")
// Attr gets the specified attribute's value for the first element in the
// Selection. To get the value for each element individually, use a looping
// construct such as the Each or Map method.
func (s *Selection) Attr(attrName string) (val string, exists bool) {
if len(s.Nodes) == 0 {
return
}
return getAttributeValue(attrName, s.Nodes[0])
}
// AttrOr works like Attr but returns the default value if the attribute is not present.
func (s *Selection) AttrOr(attrName, defaultValue string) string {
if len(s.Nodes) == 0 {
return defaultValue
}
val, exists := getAttributeValue(attrName, s.Nodes[0])
if !exists {
return defaultValue
}
return val
}
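// exampleAttr is an illustrative usage sketch (hypothetical helper, invented
// markup): Attr returns the first matched element's value and whether the
// attribute exists, while AttrOr falls back to a default when it is absent.
func exampleAttr() (href string, ok bool, title string, err error) {
	doc, err := NewDocumentFromReader(strings.NewReader(
		`<p><a href="https://example.com">site</a></p>`))
	if err != nil {
		return
	}
	links := doc.Find("a")
	href, ok = links.Attr("href")             // "https://example.com", true
	title = links.AttrOr("title", "untitled") // no title attribute, so "untitled"
	return
}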
// RemoveAttr removes the named attribute from each element in the set of matched elements.
func (s *Selection) RemoveAttr(attrName string) *Selection {
for _, n := range s.Nodes {
removeAttr(n, attrName)
}
return s
}
// SetAttr sets the given attribute on each element in the set of matched elements.
func (s *Selection) SetAttr(attrName, val string) *Selection {
for _, n := range s.Nodes {
attr := getAttributePtr(attrName, n)
if attr == nil {
n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
} else {
attr.Val = val
}
}
return s
}
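// exampleSetAttr is an illustrative usage sketch (hypothetical helper,
// invented markup): it makes every matched link open in a new tab, drops any
// inline style attribute, and returns the modified body markup.
func exampleSetAttr() (string, error) {
	doc, err := NewDocumentFromReader(strings.NewReader(
		`<p><a href="/x" style="color:red">x</a></p>`))
	if err != nil {
		return "", err
	}
	doc.Find("a").
		SetAttr("target", "_blank"). // added (or overwritten) on each element
		RemoveAttr("style")          // removed from each element, if present
	return doc.Find("body").Html()
}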
// Text gets the combined text contents of each element in the set of matched
// elements, including their descendants.
func (s *Selection) Text() string {
var buf bytes.Buffer
// Slightly optimized vs calling Each: no single selection object created
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.TextNode {
// Keep newlines and spaces, like jQuery
buf.WriteString(n.Data)
}
if n.FirstChild != nil {
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
}
}
}
for _, n := range s.Nodes {
f(n)
}
return buf.String()
}
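// exampleText is an illustrative usage sketch (hypothetical helper, invented
// markup): Text concatenates the text of every matched element and its
// descendants, so the list below yields "onetwo".
func exampleText() (string, error) {
	doc, err := NewDocumentFromReader(strings.NewReader(
		`<ul><li>one</li><li>two</li></ul>`))
	if err != nil {
		return "", err
	}
	return doc.Find("li").Text(), nil
}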
// Size is an alias for Length.
func (s *Selection) Size() int {
return s.Length()
}
// Length returns the number of elements in the Selection object.
func (s *Selection) Length() int {
return len(s.Nodes)
}
// Html gets the HTML contents of the first element in the set of matched
// elements. It includes text and comment nodes.
func (s *Selection) Html() (ret string, e error) {
// Since there is no .innerHtml, the HTML content must be re-created from
// the nodes using html.Render.
var buf bytes.Buffer
if len(s.Nodes) > 0 {
for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
e = html.Render(&buf, c)
if e != nil {
return
}
}
ret = buf.String()
}
return
}
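// exampleHtml is an illustrative usage sketch (hypothetical helper, invented
// markup): unlike Text, Html renders only the contents of the first matched
// element, so the fragment below yields "<b>one</b>".
func exampleHtml() (string, error) {
	doc, err := NewDocumentFromReader(strings.NewReader(
		`<ul><li><b>one</b></li><li><b>two</b></li></ul>`))
	if err != nil {
		return "", err
	}
	return doc.Find("li").Html()
}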
// AddClass adds the given class(es) to each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) AddClass(class ...string) *Selection {
classStr := strings.TrimSpace(strings.Join(class, " "))
if classStr == "" {
return s
}
tcls := getClassesSlice(classStr)
for _, n := range s.Nodes {
curClasses, attr := getClassesAndAttr(n, true)
for _, newClass := range tcls {
if !strings.Contains(curClasses, " "+newClass+" ") {
curClasses += newClass + " "
}
}
setClasses(n, attr, curClasses)
}
return s
}
// HasClass determines whether any of the matched elements are assigned the
// given class.
func (s *Selection) HasClass(class string) bool {
class = " " + class + " "
for _, n := range s.Nodes {
classes, _ := getClassesAndAttr(n, false)
if strings.Contains(classes, class) {
return true
}
}
return false
}
// RemoveClass removes the given class(es) from each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
// If no class name is provided, all classes are removed.
func (s *Selection) RemoveClass(class ...string) *Selection {
var rclasses []string
classStr := strings.TrimSpace(strings.Join(class, " "))
remove := classStr == ""
if !remove {
rclasses = getClassesSlice(classStr)
}
for _, n := range s.Nodes {
if remove {
removeAttr(n, "class")
} else {
classes, attr := getClassesAndAttr(n, true)
for _, rcl := range rclasses {
classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
}
setClasses(n, attr, classes)
}
}
return s
}
// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) ToggleClass(class ...string) *Selection {
classStr := strings.TrimSpace(strings.Join(class, " "))
if classStr == "" {
return s
}
tcls := getClassesSlice(classStr)
for _, n := range s.Nodes {
classes, attr := getClassesAndAttr(n, true)
for _, tcl := range tcls {
if strings.Contains(classes, " "+tcl+" ") {
classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
} else {
classes += tcl + " "
}
}
setClasses(n, attr, classes)
}
return s
}
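// exampleClasses is an illustrative usage sketch (hypothetical helper,
// invented markup) combining AddClass, RemoveClass, ToggleClass and
// HasClass; it reports whether the div ends up with class "d" and without
// class "a", which is true for the sequence below.
func exampleClasses() (bool, error) {
	doc, err := NewDocumentFromReader(strings.NewReader(`<div class="a"></div>`))
	if err != nil {
		return false, err
	}
	div := doc.Find("div")
	div.AddClass("b", "c")    // div now carries classes a, b and c
	div.RemoveClass("a")      // classes b and c remain
	div.ToggleClass("c", "d") // c is removed, d is added: classes b and d
	return div.HasClass("d") && !div.HasClass("a"), nil
}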
func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
if n == nil {
return nil
}
for i, a := range n.Attr {
if a.Key == attrName {
return &n.Attr[i]
}
}
return nil
}
// Private function to get the specified attribute's value from a node.
func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
if a := getAttributePtr(attrName, n); a != nil {
val = a.Val
exists = true
}
return
}
// Get and normalize the "class" attribute from the node.
func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
// Applies only to element nodes
if n.Type == html.ElementNode {
attr = getAttributePtr("class", n)
if attr == nil && create {
n.Attr = append(n.Attr, html.Attribute{
Key: "class",
Val: "",
})
attr = &n.Attr[len(n.Attr)-1]
}
}
if attr == nil {
classes = " "
} else {
classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
}
return
}
func getClassesSlice(classes string) []string {
return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
}
func removeAttr(n *html.Node, attrName string) {
for i, a := range n.Attr {
if a.Key == attrName {
n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
return
}
}
}
func setClasses(n *html.Node, attr *html.Attribute, classes string) {
classes = strings.TrimSpace(classes)
if classes == "" {
removeAttr(n, "class")
return
}
attr.Val = classes
}

49
vendor/github.com/PuerkitoBio/goquery/query.go generated vendored Normal file

@ -0,0 +1,49 @@
package goquery
import "golang.org/x/net/html"
// Is checks the current matched set of elements against a selector and
// returns true if at least one of these elements matches.
func (s *Selection) Is(selector string) bool {
return s.IsMatcher(compileMatcher(selector))
}
// IsMatcher checks the current matched set of elements against a matcher and
// returns true if at least one of these elements matches.
func (s *Selection) IsMatcher(m Matcher) bool {
if len(s.Nodes) > 0 {
if len(s.Nodes) == 1 {
return m.Match(s.Nodes[0])
}
return len(m.Filter(s.Nodes)) > 0
}
return false
}
// IsFunction checks the current matched set of elements against a predicate and
// returns true if at least one of these elements matches.
func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
return s.FilterFunction(f).Length() > 0
}
// IsSelection checks the current matched set of elements against a Selection object
// and returns true if at least one of these elements matches.
func (s *Selection) IsSelection(sel *Selection) bool {
return s.FilterSelection(sel).Length() > 0
}
// IsNodes checks the current matched set of elements against the specified nodes
// and returns true if at least one of these elements matches.
func (s *Selection) IsNodes(nodes ...*html.Node) bool {
return s.FilterNodes(nodes...).Length() > 0
}
// Contains returns true if the specified Node is within,
// at any depth, one of the nodes in the Selection object.
// It is NOT inclusive: to behave like jQuery's implementation (and
// unlike JavaScript's .contains), it returns false if the contained
// node is itself one of the nodes in the Selection.
func (s *Selection) Contains(n *html.Node) bool {
return sliceContains(s.Nodes, n)
}
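// exampleIsContains is an illustrative usage sketch (hypothetical helper;
// the document, node and "external" class are assumed to come from the
// caller): Is reports whether at least one matched link carries the class,
// and Contains reports whether n lives inside one of the matched links
// (false if n is one of the links itself).
func exampleIsContains(doc *Document, n *html.Node) (isExternal, inside bool) {
	links := doc.Find("a")
	return links.Is(".external"), links.Contains(n)
}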

698
vendor/github.com/PuerkitoBio/goquery/traversal.go generated vendored Normal file

@ -0,0 +1,698 @@
package goquery
import "golang.org/x/net/html"
type siblingType int
// Sibling type, used internally when iterating over children at the same
// level (siblings) to specify which nodes are requested.
const (
siblingPrevUntil siblingType = iota - 3
siblingPrevAll
siblingPrev
siblingAll
siblingNext
siblingNextAll
siblingNextUntil
siblingAllIncludingNonElements
)
// Find gets the descendants of each element in the current set of matched
// elements, filtered by a selector. It returns a new Selection object
// containing these matched elements.
func (s *Selection) Find(selector string) *Selection {
return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
}
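// exampleFind is an illustrative usage sketch (hypothetical helper; the
// document is assumed to be parsed by the caller): it selects every row
// nested inside a table. Find only looks at descendants, so the <table>
// elements themselves are never part of the result.
func exampleFind(doc *Document) []*html.Node {
	return doc.Find("table").Find("tr").Nodes
}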
// FindMatcher gets the descendants of each element in the current set of matched
// elements, filtered by the matcher. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindMatcher(m Matcher) *Selection {
return pushStack(s, findWithMatcher(s.Nodes, m))
}
// FindSelection gets the descendants of each element in the current
// Selection, filtered by a Selection. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, nil)
}
return s.FindNodes(sel.Nodes...)
}
// FindNodes gets the descendants of each element in the current
// Selection, filtered by some nodes. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
if sliceContains(s.Nodes, n) {
return []*html.Node{n}
}
return nil
}))
}
// Contents gets the children of each element in the Selection,
// including text and comment nodes. It returns a new Selection object
// containing these elements.
func (s *Selection) Contents() *Selection {
return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
}
// ContentsFiltered gets the children of each element in the Selection,
// filtered by the specified selector. It returns a new Selection
// object containing these elements. Since selectors only act on Element nodes,
// this function is an alias to ChildrenFiltered unless the selector is empty,
// in which case it is an alias to Contents.
func (s *Selection) ContentsFiltered(selector string) *Selection {
if selector != "" {
return s.ChildrenFiltered(selector)
}
return s.Contents()
}
// ContentsMatcher gets the children of each element in the Selection,
// filtered by the specified matcher. It returns a new Selection
// object containing these elements. Since matchers only act on Element nodes,
// this function is an alias to ChildrenMatcher.
func (s *Selection) ContentsMatcher(m Matcher) *Selection {
return s.ChildrenMatcher(m)
}
// Children gets the child elements of each element in the Selection.
// It returns a new Selection object containing these elements.
func (s *Selection) Children() *Selection {
return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
}
// ChildrenFiltered gets the child elements of each element in the Selection,
// filtered by the specified selector. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenFiltered(selector string) *Selection {
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
}
// ChildrenMatcher gets the child elements of each element in the Selection,
// filtered by the specified matcher. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
}
// Parent gets the parent of each element in the Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) Parent() *Selection {
return pushStack(s, getParentNodes(s.Nodes))
}
// ParentFiltered gets the parent of each element in the Selection filtered by a
// selector. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentFiltered(selector string) *Selection {
return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
}
// ParentMatcher gets the parent of each element in the Selection filtered by a
// matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentMatcher(m Matcher) *Selection {
return filterAndPush(s, getParentNodes(s.Nodes), m)
}
// Closest gets the first element that matches the selector by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) Closest(selector string) *Selection {
cs := compileMatcher(selector)
return s.ClosestMatcher(cs)
}
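// exampleClosest is an illustrative usage sketch (hypothetical helper; the
// "td.total" selector is invented): for each matched cell, Closest starts at
// the cell itself and walks up its ancestors, keeping the first <table> it
// finds.
func exampleClosest(doc *Document) *Selection {
	return doc.Find("td.total").Closest("table")
}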
// ClosestMatcher gets the first element that matches the matcher by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestMatcher(m Matcher) *Selection {
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
// For each node in the selection, test the node itself, then each parent
// until a match is found.
for ; n != nil; n = n.Parent {
if m.Match(n) {
return []*html.Node{n}
}
}
return nil
}))
}
// ClosestNodes gets the first element that matches one of the nodes by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
set := make(map[*html.Node]bool)
for _, n := range nodes {
set[n] = true
}
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
// For each node in the selection, test the node itself, then each parent
// until a match is found.
for ; n != nil; n = n.Parent {
if set[n] {
return []*html.Node{n}
}
}
return nil
}))
}
// ClosestSelection gets the first element that matches one of the nodes in the
// Selection by testing the element itself and traversing up through its ancestors
// in the DOM tree.
func (s *Selection) ClosestSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, nil)
}
return s.ClosestNodes(sel.Nodes...)
}
// Parents gets the ancestors of each element in the current Selection. It
// returns a new Selection object with the matched elements.
func (s *Selection) Parents() *Selection {
return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
}
// ParentsFiltered gets the ancestors of each element in the current
// Selection, filtered by a selector. It returns a new Selection object with
// the matched elements.
func (s *Selection) ParentsFiltered(selector string) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
}
// ParentsMatcher gets the ancestors of each element in the current
// Selection, filtered by a matcher. It returns a new Selection object with
// the matched elements.
func (s *Selection) ParentsMatcher(m Matcher) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
}
// ParentsUntil gets the ancestors of each element in the Selection, up to but
// not including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntil(selector string) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
}
// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
// not including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, m, nil))
}
// ParentsUntilSelection gets the ancestors of each element in the Selection,
// up to but not including the elements in the specified Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.Parents()
}
return s.ParentsUntilNodes(sel.Nodes...)
}
// ParentsUntilNodes gets the ancestors of each element in the Selection,
// up to but not including the specified nodes. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
}
// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
// results based on a selector string. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
// results based on a matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
}
// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.ParentsMatcher(filter)
}
return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
}
// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
}
// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
}
// Siblings gets the siblings of each element in the Selection. It returns
// a new Selection object containing the matched elements.
func (s *Selection) Siblings() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
}
// SiblingsFiltered gets the siblings of each element in the Selection
// filtered by a selector. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
}
// SiblingsMatcher gets the siblings of each element in the Selection
// filtered by a matcher. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
}
// Next gets the immediately following sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Next() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
}
// NextFiltered gets the immediately following sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
}
// NextMatcher gets the immediately following sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
}
// NextAll gets all the following siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) NextAll() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
}
// NextAllFiltered gets all the following siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
}
// NextAllMatcher gets all the following siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
}
// Prev gets the immediately preceding sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Prev() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
}
// PrevFiltered gets the immediately preceding sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
}
// PrevMatcher gets the immediately preceding sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
}
// PrevAll gets all the preceding siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) PrevAll() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
}
// PrevAllFiltered gets all the preceding siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
}
// PrevAllMatcher gets all the preceding siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
}
// NextUntil gets all following siblings of each element up to but not
// including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntil(selector string) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
compileMatcher(selector), nil))
}
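// exampleNextUntil is an illustrative usage sketch (hypothetical helper; the
// heading id is invented): starting from a section heading, it collects the
// following sibling elements and stops just before the next <h2>.
func exampleNextUntil(doc *Document) *Selection {
	return doc.Find("h2#install").NextUntil("h2")
}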
// NextUntilMatcher gets all following siblings of each element up to but not
// including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
m, nil))
}
// NextUntilSelection gets all following siblings of each element up to but not
// including the element matched by the Selection. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.NextAll()
}
return s.NextUntilNodes(sel.Nodes...)
}
// NextUntilNodes gets all following siblings of each element up to but not
// including the element matched by the nodes. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes))
}
// PrevUntil gets all preceding siblings of each element up to but not
// including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntil(selector string) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
compileMatcher(selector), nil))
}
// PrevUntilMatcher gets all preceding siblings of each element up to but not
// including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
m, nil))
}
// PrevUntilSelection gets all preceding siblings of each element up to but not
// including the element matched by the Selection. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.PrevAll()
}
return s.PrevUntilNodes(sel.Nodes...)
}
// PrevUntilNodes gets all preceding siblings of each element up to but not
// including the element matched by the nodes. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes))
}
// NextFilteredUntil is like NextUntil, with the option to filter
// the results based on a selector string.
// It returns a new Selection object containing the matched elements.
func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
// the results based on a matcher.
// It returns a new Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
until, nil), filter)
}
// NextFilteredUntilSelection is like NextUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// NextMatcherUntilSelection is like NextUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.NextMatcher(filter)
}
return s.NextMatcherUntilNodes(filter, sel.Nodes...)
}
// NextFilteredUntilNodes is like NextUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes), compileMatcher(filterSelector))
}
// NextMatcherUntilNodes is like NextUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes), filter)
}
// PrevFilteredUntil is like PrevUntil, with the option to filter
// the results based on a selector string.
// It returns a new Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
// the results based on a matcher.
// It returns a new Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
until, nil), filter)
}
// PrevFilteredUntilSelection is like PrevUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// PrevMatcherUntilSelection is like PrevUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.PrevMatcher(filter)
}
return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
}
// PrevFilteredUntilNodes is like PrevUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes), compileMatcher(filterSelector))
}
// PrevMatcherUntilNodes is like PrevUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes), filter)
}
// filterAndPush filters the nodes based on a matcher and pushes the results
// onto the stack, with srcSel as the previous selection.
func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
// Create a temporary Selection with the specified nodes to filter using winnow
sel := &Selection{nodes, srcSel.document, nil}
// Filter based on matcher and push on stack
return pushStack(srcSel, winnow(sel, m, true))
}
// Internal implementation of Find that returns raw nodes.
func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
// Map nodes to find the matches within the children of each node
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
// Go down one level, because jQuery's Find selects only within descendants
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Type == html.ElementNode {
result = append(result, m.MatchAll(c)...)
}
}
return
})
}
// Internal implementation to get all parent nodes, stopping at the specified
// node (or nil if no stop).
func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
for p := n.Parent; p != nil; p = p.Parent {
sel := newSingleSelection(p, nil)
if stopm != nil {
if sel.IsMatcher(stopm) {
break
}
} else if len(stopNodes) > 0 {
if sel.IsNodes(stopNodes...) {
break
}
}
if p.Type == html.ElementNode {
result = append(result, p)
}
}
return
})
}
// Internal implementation of sibling nodes that returns a raw slice of matches.
func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
var f func(*html.Node) bool
// If the requested siblings are ...Until, create the test function to
// determine if the until condition is reached (returns true if it is)
if st == siblingNextUntil || st == siblingPrevUntil {
f = func(n *html.Node) bool {
if untilm != nil {
// Matcher-based condition
sel := newSingleSelection(n, nil)
return sel.IsMatcher(untilm)
} else if len(untilNodes) > 0 {
// Nodes-based condition
sel := newSingleSelection(n, nil)
return sel.IsNodes(untilNodes...)
}
return false
}
}
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
return getChildrenWithSiblingType(n.Parent, st, n, f)
})
}
// Gets the child nodes of each node in the specified slice of nodes,
// based on the requested sibling type.
func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
return getChildrenWithSiblingType(n, st, nil, nil)
})
}
// Gets the children of the specified parent, based on the requested sibling
// type, skipping a specified node if required.
func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
untilFunc func(*html.Node) bool) (result []*html.Node) {
// Create the iterator function
var iter = func(cur *html.Node) (ret *html.Node) {
// Based on the sibling type requested, iterate the right way
for {
switch st {
case siblingAll, siblingAllIncludingNonElements:
if cur == nil {
// First iteration, start with first child of parent
// Skip node if required
if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
ret = skipNode.NextSibling
}
} else {
// Skip node if required
if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
ret = skipNode.NextSibling
}
}
case siblingPrev, siblingPrevAll, siblingPrevUntil:
if cur == nil {
// Start with previous sibling of the skip node
ret = skipNode.PrevSibling
} else {
ret = cur.PrevSibling
}
case siblingNext, siblingNextAll, siblingNextUntil:
if cur == nil {
// Start with next sibling of the skip node
ret = skipNode.NextSibling
} else {
ret = cur.NextSibling
}
default:
panic("Invalid sibling type.")
}
if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
return
}
// Not a valid node, try again from this one
cur = ret
}
}
for c := iter(nil); c != nil; c = iter(c) {
// If this is an ...Until case, test before append (returns true
// if the until condition is reached)
if st == siblingNextUntil || st == siblingPrevUntil {
if untilFunc(c) {
return
}
}
result = append(result, c)
if st == siblingNext || st == siblingPrev {
// Only one node was requested (immediate next or previous), so exit
return
}
}
return
}
// Internal implementation of parent nodes that returns a raw slice of nodes.
func getParentNodes(nodes []*html.Node) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
if n.Parent != nil && n.Parent.Type == html.ElementNode {
return []*html.Node{n.Parent}
}
return nil
})
}
// Internal map function used by many traversing methods. Takes the source nodes
// to iterate on and the mapping function that returns an array of nodes.
// Returns an array of nodes mapped by calling the callback function once for
// each node in the source nodes.
func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
set := make(map[*html.Node]bool)
for i, n := range nodes {
if vals := f(i, n); len(vals) > 0 {
result = appendWithoutDuplicates(result, vals, set)
}
}
return result
}

141
vendor/github.com/PuerkitoBio/goquery/type.go generated vendored Normal file

@ -0,0 +1,141 @@
package goquery
import (
"errors"
"io"
"net/http"
"net/url"
"github.com/andybalholm/cascadia"
"golang.org/x/net/html"
)
// Document represents an HTML document to be manipulated. Unlike jQuery, which
// is loaded as part of a DOM document, and thus acts upon its containing
// document, GoQuery doesn't know which HTML document to act upon. So it needs
// to be told, and that's what the Document class is for. It holds the root
// document node to manipulate, and can make selections on this document.
type Document struct {
*Selection
Url *url.URL
rootNode *html.Node
}
// NewDocumentFromNode is a Document constructor that takes a root html Node
// as argument.
func NewDocumentFromNode(root *html.Node) *Document {
return newDocument(root, nil)
}
// NewDocument is a Document constructor that takes a string URL as argument.
// It loads the specified document, parses it, and stores the root Document
// node, ready to be manipulated.
//
// Deprecated: Use the net/http standard library package to make the request
// and validate the response before calling goquery.NewDocumentFromReader
// with the response's body.
func NewDocument(url string) (*Document, error) {
// Load the URL
res, e := http.Get(url)
if e != nil {
return nil, e
}
return NewDocumentFromResponse(res)
}
// NewDocumentFromReader returns a Document from an io.Reader.
// It returns an error as second value if the reader's data cannot be parsed
// as html. It does not check if the reader is also an io.Closer; the
// provided reader is never closed by this call. It is the responsibility
// of the caller to close it if required.
func NewDocumentFromReader(r io.Reader) (*Document, error) {
root, e := html.Parse(r)
if e != nil {
return nil, e
}
return newDocument(root, nil), nil
}
// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
// It loads the specified response's document, parses it, and stores the root Document
// node, ready to be manipulated. The response's body is closed on return.
//
// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
func NewDocumentFromResponse(res *http.Response) (*Document, error) {
if res == nil {
return nil, errors.New("Response is nil")
}
defer res.Body.Close()
if res.Request == nil {
return nil, errors.New("Response.Request is nil")
}
// Parse the HTML into nodes
root, e := html.Parse(res.Body)
if e != nil {
return nil, e
}
// Create and fill the document
return newDocument(root, res.Request.URL), nil
}
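// exampleFetch is an illustrative usage sketch (hypothetical helper) that
// follows the advice in the deprecation notes above: make the request with
// net/http, validate the response yourself, then hand the body to
// NewDocumentFromReader. Error handling is deliberately minimal.
func exampleFetch(rawURL string) (*Document, error) {
	res, err := http.Get(rawURL)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return nil, errors.New("unexpected status: " + res.Status)
	}
	return NewDocumentFromReader(res.Body)
}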
// CloneDocument creates a deep-clone of a document.
func CloneDocument(doc *Document) *Document {
return newDocument(cloneNode(doc.rootNode), doc.Url)
}
// Private constructor, make sure all fields are correctly filled.
func newDocument(root *html.Node, url *url.URL) *Document {
// Create and fill the document
d := &Document{nil, url, root}
d.Selection = newSingleSelection(root, d)
return d
}
// Selection represents a collection of nodes matching some criteria. The
// initial Selection can be created by using Document.Find, and then
// manipulated using the jQuery-like chainable syntax and methods.
type Selection struct {
Nodes []*html.Node
document *Document
prevSel *Selection
}
// Helper constructor to create an empty selection
func newEmptySelection(doc *Document) *Selection {
return &Selection{nil, doc, nil}
}
// Helper constructor to create a selection of only one node
func newSingleSelection(node *html.Node, doc *Document) *Selection {
return &Selection{[]*html.Node{node}, doc, nil}
}
// Matcher is an interface that defines the methods to match
// HTML nodes against a compiled selector string. Cascadia's
// Selector implements this interface.
type Matcher interface {
Match(*html.Node) bool
MatchAll(*html.Node) []*html.Node
Filter([]*html.Node) []*html.Node
}
// compileMatcher compiles the selector string s and returns
// the corresponding Matcher. If s is an invalid selector string,
// it returns a Matcher that fails all matches.
func compileMatcher(s string) Matcher {
cs, err := cascadia.Compile(s)
if err != nil {
return invalidMatcher{}
}
return cs
}
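// exampleMatcher is an illustrative usage sketch (hypothetical helper; the
// "tr.data" selector is invented): when the same selector string is applied
// many times, it can be compiled once with cascadia and reused through the
// Matcher-based methods such as FindMatcher, avoiding the repeated
// compilation done by the string-based methods.
func exampleMatcher(doc *Document) (*Selection, error) {
	m, err := cascadia.Compile("tr.data")
	if err != nil {
		return nil, err
	}
	// m can be kept and reused across calls and documents.
	return doc.FindMatcher(m), nil
}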
// invalidMatcher is a Matcher that always fails to match.
type invalidMatcher struct{}
func (invalidMatcher) Match(n *html.Node) bool { return false }
func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }

161
vendor/github.com/PuerkitoBio/goquery/utilities.go generated vendored Normal file

@ -0,0 +1,161 @@
package goquery
import (
"bytes"
"golang.org/x/net/html"
)
// minNodesForSet is the threshold used to determine if a set
// (map[*html.Node]bool) should be used instead of iterating over a slice.
// The set uses more memory and is slower than slice iteration for small N.
const minNodesForSet = 1000
var nodeNames = []string{
html.ErrorNode: "#error",
html.TextNode: "#text",
html.DocumentNode: "#document",
html.CommentNode: "#comment",
}
// NodeName returns the node name of the first element in the selection.
// It tries to behave in a similar way as the DOM's nodeName property
// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
//
// Go's net/html package defines the following node types, listed with
// the corresponding returned value from this function:
//
// ErrorNode : #error
// TextNode : #text
// DocumentNode : #document
// ElementNode : the element's tag name
// CommentNode : #comment
// DoctypeNode : the name of the document type
//
func NodeName(s *Selection) string {
if s.Length() == 0 {
return ""
}
switch n := s.Get(0); n.Type {
case html.ElementNode, html.DoctypeNode:
return n.Data
default:
if n.Type >= 0 && int(n.Type) < len(nodeNames) {
return nodeNames[n.Type]
}
return ""
}
}
// OuterHtml returns the outer HTML rendering of the first item in
// the selection - that is, the HTML including the first element's
// tag and attributes.
//
// Unlike InnerHtml, this is a function and not a method on the Selection,
// because this is not a jQuery method (in javascript-land, this is
// a property provided by the DOM).
func OuterHtml(s *Selection) (string, error) {
var buf bytes.Buffer
if s.Length() == 0 {
return "", nil
}
n := s.Get(0)
if err := html.Render(&buf, n); err != nil {
return "", err
}
return buf.String(), nil
}
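// exampleOuterHtml is an illustrative usage sketch (hypothetical helper; the
// selection is assumed to come from the caller): for a selection whose first
// element is, say, <a href="/x">x</a>, NodeName reports "a", Selection.Html
// returns the inner markup "x", and OuterHtml returns the whole element,
// including its tag and attributes.
func exampleOuterHtml(s *Selection) (name, inner, outer string, err error) {
	name = NodeName(s)
	if inner, err = s.Html(); err != nil {
		return
	}
	outer, err = OuterHtml(s)
	return
}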
// Loop through all container nodes to search for the target node.
func sliceContains(container []*html.Node, contained *html.Node) bool {
for _, n := range container {
if nodeContains(n, contained) {
return true
}
}
return false
}
// Checks if the contained node is within the container node.
func nodeContains(container *html.Node, contained *html.Node) bool {
// Check if the parent of the contained node is the container node, traversing
// upward until the top is reached, or the container is found.
for contained = contained.Parent; contained != nil; contained = contained.Parent {
if container == contained {
return true
}
}
return false
}
// Checks if the target node is in the slice of nodes.
func isInSlice(slice []*html.Node, node *html.Node) bool {
return indexInSlice(slice, node) > -1
}
// Returns the index of the target node in the slice, or -1.
func indexInSlice(slice []*html.Node, node *html.Node) int {
if node != nil {
for i, n := range slice {
if n == node {
return i
}
}
}
return -1
}
// Appends the new nodes to the target slice, making sure no duplicate is added.
// There is no check to the original state of the target slice, so it may still
// contain duplicates. The target slice is returned because append() may create
// a new underlying array. If targetSet is nil, a local set is created with the
// target if len(target) + len(nodes) is greater than minNodesForSet.
func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
// if there are not that many nodes, don't use the map, faster to just use nested loops
// (unless a non-nil targetSet is passed, in which case the caller knows better).
if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
for _, n := range nodes {
if !isInSlice(target, n) {
target = append(target, n)
}
}
return target
}
// if a targetSet is passed, then assume it is reliable, otherwise create one
// and initialize it with the current target contents.
if targetSet == nil {
targetSet = make(map[*html.Node]bool, len(target))
for _, n := range target {
targetSet[n] = true
}
}
for _, n := range nodes {
if !targetSet[n] {
target = append(target, n)
targetSet[n] = true
}
}
return target
}
// Loop through a selection, returning only those nodes that pass the predicate
// function.
func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
for i, n := range sel.Nodes {
if predicate(i, newSingleSelection(n, sel.document)) {
result = append(result, n)
}
}
return result
}
// Creates a new Selection object based on the specified nodes, and keeps the
// source Selection object on the stack (linked list).
func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
result := &Selection{nodes, fromSel.document, fromSel}
return result
}

24
vendor/github.com/andybalholm/cascadia/LICENSE generated vendored Executable file

@ -0,0 +1,24 @@
Copyright (c) 2011 Andy Balholm. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

9
vendor/github.com/andybalholm/cascadia/README.md generated vendored Normal file

@ -0,0 +1,9 @@
# cascadia
[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia)
The Cascadia package implements CSS selectors for use with the parse trees produced by the html package.
To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia), a command-line tool that is a thin wrapper around this package.
[Refer to godoc here](https://godoc.org/github.com/andybalholm/cascadia).

5
vendor/github.com/andybalholm/cascadia/go.mod generated vendored Normal file

@ -0,0 +1,5 @@
module github.com/andybalholm/cascadia
require golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01
go 1.13

838
vendor/github.com/andybalholm/cascadia/parser.go generated vendored Normal file

@ -0,0 +1,838 @@
// Package cascadia is an implementation of CSS selectors.
package cascadia
import (
"errors"
"fmt"
"regexp"
"strconv"
"strings"
)
// a parser for CSS selectors
type parser struct {
s string // the source text
i int // the current position
// if `false`, parsing a pseudo-element
// returns an error.
acceptPseudoElements bool
}
// parseEscape parses a backslash escape.
func (p *parser) parseEscape() (result string, err error) {
if len(p.s) < p.i+2 || p.s[p.i] != '\\' {
return "", errors.New("invalid escape sequence")
}
start := p.i + 1
c := p.s[start]
switch {
case c == '\r' || c == '\n' || c == '\f':
return "", errors.New("escaped line ending outside string")
case hexDigit(c):
// unicode escape (hex)
var i int
for i = start; i < start+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
// empty
}
v, _ := strconv.ParseUint(p.s[start:i], 16, 21)
if len(p.s) > i {
switch p.s[i] {
case '\r':
i++
if len(p.s) > i && p.s[i] == '\n' {
i++
}
case ' ', '\t', '\n', '\f':
i++
}
}
p.i = i
return string(rune(v)), nil
}
// Return the literal character after the backslash.
result = p.s[start : start+1]
p.i += 2
return result, nil
}
// toLowerASCII returns s with all ASCII capital letters lowercased.
func toLowerASCII(s string) string {
var b []byte
for i := 0; i < len(s); i++ {
if c := s[i]; 'A' <= c && c <= 'Z' {
if b == nil {
b = make([]byte, len(s))
copy(b, s)
}
b[i] = s[i] + ('a' - 'A')
}
}
if b == nil {
return s
}
return string(b)
}
func hexDigit(c byte) bool {
return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
}
// nameStart returns whether c can be the first character of an identifier
// (not counting an initial hyphen, or an escape sequence).
func nameStart(c byte) bool {
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127
}
// nameChar returns whether c can be a character within an identifier
// (not counting an escape sequence).
func nameChar(c byte) bool {
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 ||
c == '-' || '0' <= c && c <= '9'
}
// parseIdentifier parses an identifier.
func (p *parser) parseIdentifier() (result string, err error) {
startingDash := false
if len(p.s) > p.i && p.s[p.i] == '-' {
startingDash = true
p.i++
}
if len(p.s) <= p.i {
return "", errors.New("expected identifier, found EOF instead")
}
if c := p.s[p.i]; !(nameStart(c) || c == '\\') {
return "", fmt.Errorf("expected identifier, found %c instead", c)
}
result, err = p.parseName()
if startingDash && err == nil {
result = "-" + result
}
return
}
// parseName parses a name (which is like an identifier, but doesn't have
// extra restrictions on the first character).
func (p *parser) parseName() (result string, err error) {
i := p.i
loop:
for i < len(p.s) {
c := p.s[i]
switch {
case nameChar(c):
start := i
for i < len(p.s) && nameChar(p.s[i]) {
i++
}
result += p.s[start:i]
case c == '\\':
p.i = i
val, err := p.parseEscape()
if err != nil {
return "", err
}
i = p.i
result += val
default:
break loop
}
}
if result == "" {
return "", errors.New("expected name, found EOF instead")
}
p.i = i
return result, nil
}
// parseString parses a single- or double-quoted string.
func (p *parser) parseString() (result string, err error) {
i := p.i
if len(p.s) < i+2 {
return "", errors.New("expected string, found EOF instead")
}
quote := p.s[i]
i++
loop:
for i < len(p.s) {
switch p.s[i] {
case '\\':
if len(p.s) > i+1 {
switch c := p.s[i+1]; c {
case '\r':
if len(p.s) > i+2 && p.s[i+2] == '\n' {
i += 3
continue loop
}
fallthrough
case '\n', '\f':
i += 2
continue loop
}
}
p.i = i
val, err := p.parseEscape()
if err != nil {
return "", err
}
i = p.i
result += val
case quote:
break loop
case '\r', '\n', '\f':
return "", errors.New("unexpected end of line in string")
default:
start := i
for i < len(p.s) {
if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' {
break
}
i++
}
result += p.s[start:i]
}
}
if i >= len(p.s) {
return "", errors.New("EOF in string")
}
// Consume the final quote.
i++
p.i = i
return result, nil
}
// parseRegex parses a regular expression; the end is defined by encountering an
// unmatched closing ')' or ']' which is not consumed
func (p *parser) parseRegex() (rx *regexp.Regexp, err error) {
i := p.i
if len(p.s) < i+2 {
return nil, errors.New("expected regular expression, found EOF instead")
}
// number of open parens or brackets;
// when it becomes negative, finished parsing regex
open := 0
loop:
for i < len(p.s) {
switch p.s[i] {
case '(', '[':
open++
case ')', ']':
open--
if open < 0 {
break loop
}
}
i++
}
if i >= len(p.s) {
return nil, errors.New("EOF in regular expression")
}
rx, err = regexp.Compile(p.s[p.i:i])
p.i = i
return rx, err
}
// skipWhitespace consumes whitespace characters and comments.
// It returns true if there was actually anything to skip.
func (p *parser) skipWhitespace() bool {
i := p.i
for i < len(p.s) {
switch p.s[i] {
case ' ', '\t', '\r', '\n', '\f':
i++
continue
case '/':
if strings.HasPrefix(p.s[i:], "/*") {
end := strings.Index(p.s[i+len("/*"):], "*/")
if end != -1 {
i += end + len("/**/")
continue
}
}
}
break
}
if i > p.i {
p.i = i
return true
}
return false
}
// consumeParenthesis consumes an opening parenthesis and any following
// whitespace. It returns true if there was actually a parenthesis to skip.
func (p *parser) consumeParenthesis() bool {
if p.i < len(p.s) && p.s[p.i] == '(' {
p.i++
p.skipWhitespace()
return true
}
return false
}
// consumeClosingParenthesis consumes a closing parenthesis and any preceding
// whitespace. It returns true if there was actually a parenthesis to skip.
func (p *parser) consumeClosingParenthesis() bool {
i := p.i
p.skipWhitespace()
if p.i < len(p.s) && p.s[p.i] == ')' {
p.i++
return true
}
p.i = i
return false
}
// parseTypeSelector parses a type selector (one that matches by tag name).
func (p *parser) parseTypeSelector() (result tagSelector, err error) {
tag, err := p.parseIdentifier()
if err != nil {
return
}
return tagSelector{tag: toLowerASCII(tag)}, nil
}
// parseIDSelector parses a selector that matches by id attribute.
func (p *parser) parseIDSelector() (idSelector, error) {
if p.i >= len(p.s) {
return idSelector{}, fmt.Errorf("expected id selector (#id), found EOF instead")
}
if p.s[p.i] != '#' {
return idSelector{}, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
}
p.i++
id, err := p.parseName()
if err != nil {
return idSelector{}, err
}
return idSelector{id: id}, nil
}
// parseClassSelector parses a selector that matches by class attribute.
func (p *parser) parseClassSelector() (classSelector, error) {
if p.i >= len(p.s) {
return classSelector{}, fmt.Errorf("expected class selector (.class), found EOF instead")
}
if p.s[p.i] != '.' {
return classSelector{}, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
}
p.i++
class, err := p.parseIdentifier()
if err != nil {
return classSelector{}, err
}
return classSelector{class: class}, nil
}
// parseAttributeSelector parses a selector that matches by attribute value.
func (p *parser) parseAttributeSelector() (attrSelector, error) {
if p.i >= len(p.s) {
return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
}
if p.s[p.i] != '[' {
return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
}
p.i++
p.skipWhitespace()
key, err := p.parseIdentifier()
if err != nil {
return attrSelector{}, err
}
key = toLowerASCII(key)
p.skipWhitespace()
if p.i >= len(p.s) {
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
}
if p.s[p.i] == ']' {
p.i++
return attrSelector{key: key, operation: ""}, nil
}
if p.i+2 >= len(p.s) {
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
}
op := p.s[p.i : p.i+2]
if op[0] == '=' {
op = "="
} else if op[1] != '=' {
return attrSelector{}, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
}
p.i += len(op)
p.skipWhitespace()
if p.i >= len(p.s) {
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
}
var val string
var rx *regexp.Regexp
if op == "#=" {
rx, err = p.parseRegex()
} else {
switch p.s[p.i] {
case '\'', '"':
val, err = p.parseString()
default:
val, err = p.parseIdentifier()
}
}
if err != nil {
return attrSelector{}, err
}
p.skipWhitespace()
if p.i >= len(p.s) {
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
}
if p.s[p.i] != ']' {
return attrSelector{}, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
}
p.i++
switch op {
case "=", "!=", "~=", "|=", "^=", "$=", "*=", "#=":
return attrSelector{key: key, val: val, operation: op, regexp: rx}, nil
default:
return attrSelector{}, fmt.Errorf("attribute operator %q is not supported", op)
}
}
var errExpectedParenthesis = errors.New("expected '(' but didn't find it")
var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it")
var errUnmatchedParenthesis = errors.New("unmatched '('")
// parsePseudoclassSelector parses a pseudoclass selector like :not(p) or a pseudo-element.
// For backwards compatibility, both the ':' and '::' prefixes are allowed for pseudo-elements.
// https://drafts.csswg.org/selectors-3/#pseudo-elements
// Returning a nil `Sel` (and a nil `error`) means we found a pseudo-element.
func (p *parser) parsePseudoclassSelector() (out Sel, pseudoElement string, err error) {
if p.i >= len(p.s) {
return nil, "", fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
}
if p.s[p.i] != ':' {
return nil, "", fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
}
p.i++
var mustBePseudoElement bool
if p.i >= len(p.s) {
return nil, "", fmt.Errorf("got empty pseudoclass (or pseudoelement)")
}
if p.s[p.i] == ':' { // we found a pseudo-element
mustBePseudoElement = true
p.i++
}
name, err := p.parseIdentifier()
if err != nil {
return
}
name = toLowerASCII(name)
if mustBePseudoElement && (name != "after" && name != "backdrop" && name != "before" &&
name != "cue" && name != "first-letter" && name != "first-line" && name != "grammar-error" &&
name != "marker" && name != "placeholder" && name != "selection" && name != "spelling-error") {
return out, "", fmt.Errorf("unknown pseudoelement :%s", name)
}
switch name {
case "not", "has", "haschild":
if !p.consumeParenthesis() {
return out, "", errExpectedParenthesis
}
sel, parseErr := p.parseSelectorGroup()
if parseErr != nil {
return out, "", parseErr
}
if !p.consumeClosingParenthesis() {
return out, "", errExpectedClosingParenthesis
}
out = relativePseudoClassSelector{name: name, match: sel}
case "contains", "containsown":
if !p.consumeParenthesis() {
return out, "", errExpectedParenthesis
}
if p.i == len(p.s) {
return out, "", errUnmatchedParenthesis
}
var val string
switch p.s[p.i] {
case '\'', '"':
val, err = p.parseString()
default:
val, err = p.parseIdentifier()
}
if err != nil {
return out, "", err
}
val = strings.ToLower(val)
p.skipWhitespace()
if p.i >= len(p.s) {
return out, "", errors.New("unexpected EOF in pseudo selector")
}
if !p.consumeClosingParenthesis() {
return out, "", errExpectedClosingParenthesis
}
out = containsPseudoClassSelector{own: name == "containsown", value: val}
case "matches", "matchesown":
if !p.consumeParenthesis() {
return out, "", errExpectedParenthesis
}
rx, err := p.parseRegex()
if err != nil {
return out, "", err
}
if p.i >= len(p.s) {
return out, "", errors.New("unexpected EOF in pseudo selector")
}
if !p.consumeClosingParenthesis() {
return out, "", errExpectedClosingParenthesis
}
out = regexpPseudoClassSelector{own: name == "matchesown", regexp: rx}
case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type":
if !p.consumeParenthesis() {
return out, "", errExpectedParenthesis
}
a, b, err := p.parseNth()
if err != nil {
return out, "", err
}
if !p.consumeClosingParenthesis() {
return out, "", errExpectedClosingParenthesis
}
last := name == "nth-last-child" || name == "nth-last-of-type"
ofType := name == "nth-of-type" || name == "nth-last-of-type"
out = nthPseudoClassSelector{a: a, b: b, last: last, ofType: ofType}
case "first-child":
out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: false}
case "last-child":
out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: true}
case "first-of-type":
out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: false}
case "last-of-type":
out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: true}
case "only-child":
out = onlyChildPseudoClassSelector{ofType: false}
case "only-of-type":
out = onlyChildPseudoClassSelector{ofType: true}
case "input":
out = inputPseudoClassSelector{}
case "empty":
out = emptyElementPseudoClassSelector{}
case "root":
out = rootPseudoClassSelector{}
case "after", "backdrop", "before", "cue", "first-letter", "first-line", "grammar-error", "marker", "placeholder", "selection", "spelling-error":
return nil, name, nil
default:
return out, "", fmt.Errorf("unknown pseudoclass or pseudoelement :%s", name)
}
return
}
// parseInteger parses a decimal integer.
func (p *parser) parseInteger() (int, error) {
i := p.i
start := i
for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' {
i++
}
if i == start {
return 0, errors.New("expected integer, but didn't find it")
}
p.i = i
val, err := strconv.Atoi(p.s[start:i])
if err != nil {
return 0, err
}
return val, nil
}
// parseNth parses the argument for :nth-child (normally of the form an+b).
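// For illustration, some arguments as they appear inside :nth-child(...) and
// the (a, b) pair they parse to:
//
//	:nth-child(2n+1)  ->  a=2,  b=1   (equivalent to "odd")
//	:nth-child(even)  ->  a=2,  b=0
//	:nth-child(-n+3)  ->  a=-1, b=3   (the first three children)
//	:nth-child(4)     ->  a=0,  b=4   (exactly the fourth child)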
func (p *parser) parseNth() (a, b int, err error) {
// initial state
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '-':
p.i++
goto negativeA
case '+':
p.i++
goto positiveA
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
goto positiveA
case 'n', 'N':
a = 1
p.i++
goto readN
case 'o', 'O', 'e', 'E':
id, nameErr := p.parseName()
if nameErr != nil {
return 0, 0, nameErr
}
id = toLowerASCII(id)
if id == "odd" {
return 2, 1, nil
}
if id == "even" {
return 2, 0, nil
}
return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id)
default:
goto invalid
}
positiveA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
a, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
goto readA
case 'n', 'N':
a = 1
p.i++
goto readN
default:
goto invalid
}
negativeA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
a, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
a = -a
goto readA
case 'n', 'N':
a = -1
p.i++
goto readN
default:
goto invalid
}
readA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case 'n', 'N':
p.i++
goto readN
default:
// The number we read as a is actually b.
return 0, a, nil
}
readN:
p.skipWhitespace()
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '+':
p.i++
p.skipWhitespace()
b, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
return a, b, nil
case '-':
p.i++
p.skipWhitespace()
b, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
return a, -b, nil
default:
return a, 0, nil
}
eof:
return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b")
invalid:
return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b")
}
// parseSimpleSelectorSequence parses a selector sequence that applies to
// a single element.
func (p *parser) parseSimpleSelectorSequence() (Sel, error) {
var selectors []Sel
if p.i >= len(p.s) {
return nil, errors.New("expected selector, found EOF instead")
}
switch p.s[p.i] {
case '*':
// It's the universal selector. Just skip over it, since it doesn't affect the meaning.
p.i++
case '#', '.', '[', ':':
// There's no type selector. The rest is handled by the main loop below.
default:
r, err := p.parseTypeSelector()
if err != nil {
return nil, err
}
selectors = append(selectors, r)
}
var pseudoElement string
loop:
for p.i < len(p.s) {
var (
ns Sel
newPseudoElement string
err error
)
switch p.s[p.i] {
case '#':
ns, err = p.parseIDSelector()
case '.':
ns, err = p.parseClassSelector()
case '[':
ns, err = p.parseAttributeSelector()
case ':':
ns, newPseudoElement, err = p.parsePseudoclassSelector()
default:
break loop
}
if err != nil {
return nil, err
}
// From https://drafts.csswg.org/selectors-3/#pseudo-elements :
// "Only one pseudo-element may appear per selector, and if present
// it must appear after the sequence of simple selectors that
// represents the subjects of the selector."
if ns == nil { // we found a pseudo-element
if pseudoElement != "" {
return nil, fmt.Errorf("only one pseudo-element is accepted per selector, got %s and %s", pseudoElement, newPseudoElement)
}
if !p.acceptPseudoElements {
return nil, fmt.Errorf("pseudo-element %s found, but pseudo-elements support is disabled", newPseudoElement)
}
pseudoElement = newPseudoElement
} else {
if pseudoElement != "" {
return nil, fmt.Errorf("pseudo-element %s must be at the end of selector", pseudoElement)
}
selectors = append(selectors, ns)
}
}
if len(selectors) == 1 && pseudoElement == "" { // no need to wrap the selectors in compoundSelector
return selectors[0], nil
}
return compoundSelector{selectors: selectors, pseudoElement: pseudoElement}, nil
}
// parseSelector parses a selector that may include combinators.
func (p *parser) parseSelector() (Sel, error) {
p.skipWhitespace()
result, err := p.parseSimpleSelectorSequence()
if err != nil {
return nil, err
}
for {
var (
combinator byte
c Sel
)
if p.skipWhitespace() {
combinator = ' '
}
if p.i >= len(p.s) {
return result, nil
}
switch p.s[p.i] {
case '+', '>', '~':
combinator = p.s[p.i]
p.i++
p.skipWhitespace()
case ',', ')':
// These characters can't begin a selector, but they can legally occur after one.
return result, nil
}
if combinator == 0 {
return result, nil
}
c, err = p.parseSimpleSelectorSequence()
if err != nil {
return nil, err
}
result = combinedSelector{first: result, combinator: combinator, second: c}
}
}
// parseSelectorGroup parses a group of selectors, separated by commas.
func (p *parser) parseSelectorGroup() (SelectorGroup, error) {
current, err := p.parseSelector()
if err != nil {
return nil, err
}
result := SelectorGroup{current}
for p.i < len(p.s) {
if p.s[p.i] != ',' {
break
}
p.i++
c, err := p.parseSelector()
if err != nil {
return nil, err
}
result = append(result, c)
}
return result, nil
}

938
vendor/github.com/andybalholm/cascadia/selector.go generated vendored Normal file
View File

@@ -0,0 +1,938 @@
package cascadia
import (
"bytes"
"fmt"
"regexp"
"strings"
"golang.org/x/net/html"
)
// Matcher is the interface for basic selector functionality.
// Match returns whether a selector matches n.
type Matcher interface {
Match(n *html.Node) bool
}
// Sel is the interface for all the functionality provided by selectors.
type Sel interface {
Matcher
Specificity() Specificity
// Returns a CSS input compiling to this selector.
String() string
// Returns a pseudo-element, or an empty string.
PseudoElement() string
}
// Parse parses a selector. Use `ParseWithPseudoElement`
// if you need support for pseudo-elements.
func Parse(sel string) (Sel, error) {
p := &parser{s: sel}
compiled, err := p.parseSelector()
if err != nil {
return nil, err
}
if p.i < len(sel) {
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
}
return compiled, nil
}
// ParseWithPseudoElement parses a single selector,
// with support for pseudo-element.
func ParseWithPseudoElement(sel string) (Sel, error) {
p := &parser{s: sel, acceptPseudoElements: true}
compiled, err := p.parseSelector()
if err != nil {
return nil, err
}
if p.i < len(sel) {
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
}
return compiled, nil
}
// ParseGroup parses a selector, or a group of selectors separated by commas.
// Use `ParseGroupWithPseudoElements`
// if you need support for pseudo-elements.
func ParseGroup(sel string) (SelectorGroup, error) {
p := &parser{s: sel}
compiled, err := p.parseSelectorGroup()
if err != nil {
return nil, err
}
if p.i < len(sel) {
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
}
return compiled, nil
}
// ParseGroupWithPseudoElements parses a selector, or a group of selectors separated by commas.
// It supports pseudo-elements.
func ParseGroupWithPseudoElements(sel string) (SelectorGroup, error) {
p := &parser{s: sel, acceptPseudoElements: true}
compiled, err := p.parseSelectorGroup()
if err != nil {
return nil, err
}
if p.i < len(sel) {
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
}
return compiled, nil
}
// A Selector is a function which tells whether a node matches or not.
//
// This type is maintained for compatibility; I recommend using the newer and
// more idiomatic interfaces Sel and Matcher.
type Selector func(*html.Node) bool
// Compile parses a selector and returns, if successful, a Selector object
// that can be used to match against html.Node objects.
func Compile(sel string) (Selector, error) {
compiled, err := ParseGroup(sel)
if err != nil {
return nil, err
}
return Selector(compiled.Match), nil
}
// MustCompile is like Compile, but panics instead of returning an error.
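// A minimal usage sketch (the selector, variable names and HTML input are
// illustrative only):
//
//	sel := cascadia.MustCompile("div.article > p")
//	doc, err := html.Parse(strings.NewReader(page)) // page holds the HTML text
//	if err != nil {
//		// handle the parse error
//	}
//	for _, n := range sel.MatchAll(doc) {
//		// n is an *html.Node for each matching <p>
//	}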
func MustCompile(sel string) Selector {
compiled, err := Compile(sel)
if err != nil {
panic(err)
}
return compiled
}
// MatchAll returns a slice of the nodes that match the selector,
// from n and its children.
func (s Selector) MatchAll(n *html.Node) []*html.Node {
return s.matchAllInto(n, nil)
}
func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node {
if s(n) {
storage = append(storage, n)
}
for child := n.FirstChild; child != nil; child = child.NextSibling {
storage = s.matchAllInto(child, storage)
}
return storage
}
func queryInto(n *html.Node, m Matcher, storage []*html.Node) []*html.Node {
for child := n.FirstChild; child != nil; child = child.NextSibling {
if m.Match(child) {
storage = append(storage, child)
}
storage = queryInto(child, m, storage)
}
return storage
}
// QueryAll returns a slice of all the nodes that match m, from the descendants
// of n.
func QueryAll(n *html.Node, m Matcher) []*html.Node {
return queryInto(n, m, nil)
}
// Match returns true if the node matches the selector.
func (s Selector) Match(n *html.Node) bool {
return s(n)
}
// MatchFirst returns the first node that matches s, from n and its children.
func (s Selector) MatchFirst(n *html.Node) *html.Node {
if s.Match(n) {
return n
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
m := s.MatchFirst(c)
if m != nil {
return m
}
}
return nil
}
// Query returns the first node that matches m, from the descendants of n.
// If none matches, it returns nil.
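// Sketch of the Matcher-based API (illustrative; doc is assumed to be an
// already-parsed *html.Node):
//
//	m, err := cascadia.Parse(`a[href^="https://"]`)
//	if err != nil {
//		// handle the selector error
//	}
//	links := cascadia.QueryAll(doc, m) // every matching descendant of doc
//	first := cascadia.Query(doc, m)    // or only the first one (nil if none)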
func Query(n *html.Node, m Matcher) *html.Node {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if m.Match(c) {
return c
}
if matched := Query(c, m); matched != nil {
return matched
}
}
return nil
}
// Filter returns the nodes in nodes that match the selector.
func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
for _, n := range nodes {
if s(n) {
result = append(result, n)
}
}
return result
}
// Filter returns the nodes that match m.
func Filter(nodes []*html.Node, m Matcher) (result []*html.Node) {
for _, n := range nodes {
if m.Match(n) {
result = append(result, n)
}
}
return result
}
type tagSelector struct {
tag string
}
// Matches elements with a given tag name.
func (t tagSelector) Match(n *html.Node) bool {
return n.Type == html.ElementNode && n.Data == t.tag
}
func (c tagSelector) Specificity() Specificity {
return Specificity{0, 0, 1}
}
func (c tagSelector) PseudoElement() string {
return ""
}
type classSelector struct {
class string
}
// Matches elements by class attribute.
func (t classSelector) Match(n *html.Node) bool {
return matchAttribute(n, "class", func(s string) bool {
return matchInclude(t.class, s)
})
}
func (c classSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c classSelector) PseudoElement() string {
return ""
}
type idSelector struct {
id string
}
// Matches elements by id attribute.
func (t idSelector) Match(n *html.Node) bool {
return matchAttribute(n, "id", func(s string) bool {
return s == t.id
})
}
func (c idSelector) Specificity() Specificity {
return Specificity{1, 0, 0}
}
func (c idSelector) PseudoElement() string {
return ""
}
type attrSelector struct {
key, val, operation string
regexp *regexp.Regexp
}
// Matches elements by attribute value.
func (t attrSelector) Match(n *html.Node) bool {
switch t.operation {
case "":
return matchAttribute(n, t.key, func(string) bool { return true })
case "=":
return matchAttribute(n, t.key, func(s string) bool { return s == t.val })
case "!=":
return attributeNotEqualMatch(t.key, t.val, n)
case "~=":
// matches elements where the attribute named key is a whitespace-separated list that includes val.
return matchAttribute(n, t.key, func(s string) bool { return matchInclude(t.val, s) })
case "|=":
return attributeDashMatch(t.key, t.val, n)
case "^=":
return attributePrefixMatch(t.key, t.val, n)
case "$=":
return attributeSuffixMatch(t.key, t.val, n)
case "*=":
return attributeSubstringMatch(t.key, t.val, n)
case "#=":
return attributeRegexMatch(t.key, t.regexp, n)
default:
panic(fmt.Sprintf("unsuported operation : %s", t.operation))
}
}
// matches elements where the attribute named key satisfies the function f.
func matchAttribute(n *html.Node, key string, f func(string) bool) bool {
if n.Type != html.ElementNode {
return false
}
for _, a := range n.Attr {
if a.Key == key && f(a.Val) {
return true
}
}
return false
}
// attributeNotEqualMatch matches elements where
// the attribute named key does not have the value val.
func attributeNotEqualMatch(key, val string, n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
for _, a := range n.Attr {
if a.Key == key && a.Val == val {
return false
}
}
return true
}
// returns true if s is a whitespace-separated list that includes val.
func matchInclude(val, s string) bool {
for s != "" {
i := strings.IndexAny(s, " \t\r\n\f")
if i == -1 {
return s == val
}
if s[:i] == val {
return true
}
s = s[i+1:]
}
return false
}
// matches elements where the attribute named key equals val or starts with val plus a hyphen.
func attributeDashMatch(key, val string, n *html.Node) bool {
return matchAttribute(n, key,
func(s string) bool {
if s == val {
return true
}
if len(s) <= len(val) {
return false
}
if s[:len(val)] == val && s[len(val)] == '-' {
return true
}
return false
})
}
// attributePrefixMatch matches elements where
// the attribute named key starts with val.
func attributePrefixMatch(key, val string, n *html.Node) bool {
return matchAttribute(n, key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.HasPrefix(s, val)
})
}
// attributeSuffixMatch matches elements where
// the attribute named key ends with val.
func attributeSuffixMatch(key, val string, n *html.Node) bool {
return matchAttribute(n, key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.HasSuffix(s, val)
})
}
// attributeSubstringMatch matches nodes where
// the attribute named key contains val.
func attributeSubstringMatch(key, val string, n *html.Node) bool {
return matchAttribute(n, key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.Contains(s, val)
})
}
// attributeRegexMatch matches nodes where
// the attribute named key matches the regular expression rx
func attributeRegexMatch(key string, rx *regexp.Regexp, n *html.Node) bool {
return matchAttribute(n, key,
func(s string) bool {
return rx.MatchString(s)
})
}
func (c attrSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c attrSelector) PseudoElement() string {
return ""
}
// ---------------- Pseudo class selectors ----------------
// we use several concrete types of pseudo-class selectors
type relativePseudoClassSelector struct {
name string // one of "not", "has", "haschild"
match SelectorGroup
}
func (s relativePseudoClassSelector) Match(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
switch s.name {
case "not":
// matches elements that do not match a.
return !s.match.Match(n)
case "has":
// matches elements with any descendant that matches a.
return hasDescendantMatch(n, s.match)
case "haschild":
// matches elements with a child that matches a.
return hasChildMatch(n, s.match)
default:
panic(fmt.Sprintf("unsupported relative pseudo class selector : %s", s.name))
}
}
// hasChildMatch returns whether n has any child that matches a.
func hasChildMatch(n *html.Node, a Matcher) bool {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if a.Match(c) {
return true
}
}
return false
}
// hasDescendantMatch performs a depth-first search of n's descendants,
// testing whether any of them match a. It returns true as soon as a match is
// found, or false if no match is found.
func hasDescendantMatch(n *html.Node, a Matcher) bool {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if a.Match(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
return true
}
}
return false
}
// Specificity returns the specificity of the most specific selectors
// in the pseudo-class arguments.
// See https://www.w3.org/TR/selectors/#specificity-rules
func (s relativePseudoClassSelector) Specificity() Specificity {
var max Specificity
for _, sel := range s.match {
newSpe := sel.Specificity()
if max.Less(newSpe) {
max = newSpe
}
}
return max
}
func (c relativePseudoClassSelector) PseudoElement() string {
return ""
}
type containsPseudoClassSelector struct {
own bool
value string
}
func (s containsPseudoClassSelector) Match(n *html.Node) bool {
var text string
if s.own {
// matches nodes that directly contain the given text
text = strings.ToLower(nodeOwnText(n))
} else {
// matches nodes that contain the given text.
text = strings.ToLower(nodeText(n))
}
return strings.Contains(text, s.value)
}
func (s containsPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c containsPseudoClassSelector) PseudoElement() string {
return ""
}
type regexpPseudoClassSelector struct {
own bool
regexp *regexp.Regexp
}
func (s regexpPseudoClassSelector) Match(n *html.Node) bool {
var text string
if s.own {
// matches nodes whose text directly matches the specified regular expression
text = nodeOwnText(n)
} else {
// matches nodes whose text matches the specified regular expression
text = nodeText(n)
}
return s.regexp.MatchString(text)
}
// writeNodeText writes the text contained in n and its descendants to b.
func writeNodeText(n *html.Node, b *bytes.Buffer) {
switch n.Type {
case html.TextNode:
b.WriteString(n.Data)
case html.ElementNode:
for c := n.FirstChild; c != nil; c = c.NextSibling {
writeNodeText(c, b)
}
}
}
// nodeText returns the text contained in n and its descendants.
func nodeText(n *html.Node) string {
var b bytes.Buffer
writeNodeText(n, &b)
return b.String()
}
// nodeOwnText returns the contents of the text nodes that are direct
// children of n.
func nodeOwnText(n *html.Node) string {
var b bytes.Buffer
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Type == html.TextNode {
b.WriteString(c.Data)
}
}
return b.String()
}
func (s regexpPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c regexpPseudoClassSelector) PseudoElement() string {
return ""
}
type nthPseudoClassSelector struct {
a, b int
last, ofType bool
}
func (s nthPseudoClassSelector) Match(n *html.Node) bool {
if s.a == 0 {
if s.last {
return simpleNthLastChildMatch(s.b, s.ofType, n)
} else {
return simpleNthChildMatch(s.b, s.ofType, n)
}
}
return nthChildMatch(s.a, s.b, s.last, s.ofType, n)
}
// nthChildMatch implements :nth-child(an+b).
// If last is true, implements :nth-last-child instead.
// If ofType is true, implements :nth-of-type instead.
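// Worked example (for clarity): with :nth-child(2n+1), a=2 and b=1, so an
// element at 1-based position i matches when i-1 is a non-negative multiple
// of 2, i.e. positions 1, 3, 5, ...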
func nthChildMatch(a, b int, last, ofType bool, n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
i := -1
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
i = count
if !last {
break
}
}
}
if i == -1 {
// This shouldn't happen, since n should always be one of its parent's children.
return false
}
if last {
i = count - i + 1
}
i -= b
if a == 0 {
return i == 0
}
return i%a == 0 && i/a >= 0
}
// simpleNthChildMatch implements :nth-child(b).
// If ofType is true, implements :nth-of-type instead.
func simpleNthChildMatch(b int, ofType bool, n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
return count == b
}
if count >= b {
return false
}
}
return false
}
// simpleNthLastChildMatch implements :nth-last-child(b).
// If ofType is true, implements :nth-last-of-type instead.
func simpleNthLastChildMatch(b int, ofType bool, n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.LastChild; c != nil; c = c.PrevSibling {
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
return count == b
}
if count >= b {
return false
}
}
return false
}
// Specificity for the nth-child pseudo-class.
// It does not support a list of selectors.
func (s nthPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c nthPseudoClassSelector) PseudoElement() string {
return ""
}
type onlyChildPseudoClassSelector struct {
ofType bool
}
// Match implements :only-child.
// If `ofType` is true, it implements :only-of-type instead.
func (s onlyChildPseudoClassSelector) Match(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if (c.Type != html.ElementNode) || (s.ofType && c.Data != n.Data) {
continue
}
count++
if count > 1 {
return false
}
}
return count == 1
}
func (s onlyChildPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c onlyChildPseudoClassSelector) PseudoElement() string {
return ""
}
type inputPseudoClassSelector struct{}
// Matches input, select, textarea and button elements.
func (s inputPseudoClassSelector) Match(n *html.Node) bool {
return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button")
}
func (s inputPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c inputPseudoClassSelector) PseudoElement() string {
return ""
}
type emptyElementPseudoClassSelector struct{}
// Matches empty elements.
func (s emptyElementPseudoClassSelector) Match(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
switch c.Type {
case html.ElementNode, html.TextNode:
return false
}
}
return true
}
func (s emptyElementPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c emptyElementPseudoClassSelector) PseudoElement() string {
return ""
}
type rootPseudoClassSelector struct{}
// Match implements :root
func (s rootPseudoClassSelector) Match(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
if n.Parent == nil {
return false
}
return n.Parent.Type == html.DocumentNode
}
func (s rootPseudoClassSelector) Specificity() Specificity {
return Specificity{0, 1, 0}
}
func (c rootPseudoClassSelector) PseudoElement() string {
return ""
}
type compoundSelector struct {
selectors []Sel
pseudoElement string
}
// Matches elements if each sub-selector matches.
func (t compoundSelector) Match(n *html.Node) bool {
if len(t.selectors) == 0 {
return n.Type == html.ElementNode
}
for _, sel := range t.selectors {
if !sel.Match(n) {
return false
}
}
return true
}
func (s compoundSelector) Specificity() Specificity {
var out Specificity
for _, sel := range s.selectors {
out = out.Add(sel.Specificity())
}
if s.pseudoElement != "" {
// https://drafts.csswg.org/selectors-3/#specificity
out = out.Add(Specificity{0, 0, 1})
}
return out
}
func (c compoundSelector) PseudoElement() string {
return c.pseudoElement
}
type combinedSelector struct {
first Sel
combinator byte
second Sel
}
func (t combinedSelector) Match(n *html.Node) bool {
if t.first == nil {
return false // maybe we should panic
}
switch t.combinator {
case 0:
return t.first.Match(n)
case ' ':
return descendantMatch(t.first, t.second, n)
case '>':
return childMatch(t.first, t.second, n)
case '+':
return siblingMatch(t.first, t.second, true, n)
case '~':
return siblingMatch(t.first, t.second, false, n)
default:
panic("unknown combinator")
}
}
// matches an element if it matches d and has an ancestor that matches a.
func descendantMatch(a, d Matcher, n *html.Node) bool {
if !d.Match(n) {
return false
}
for p := n.Parent; p != nil; p = p.Parent {
if a.Match(p) {
return true
}
}
return false
}
// matches an element if it matches d and its parent matches a.
func childMatch(a, d Matcher, n *html.Node) bool {
return d.Match(n) && n.Parent != nil && a.Match(n.Parent)
}
// matches an element if it matches s2 and is preceded by an element that matches s1.
// If adjacent is true, the sibling must be immediately before the element.
func siblingMatch(s1, s2 Matcher, adjacent bool, n *html.Node) bool {
if !s2.Match(n) {
return false
}
if adjacent {
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
if n.Type == html.TextNode || n.Type == html.CommentNode {
continue
}
return s1.Match(n)
}
return false
}
// Walk backwards looking for an element that matches s1
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
if s1.Match(c) {
return true
}
}
return false
}
func (s combinedSelector) Specificity() Specificity {
spec := s.first.Specificity()
if s.second != nil {
spec = spec.Add(s.second.Specificity())
}
return spec
}
// On a combinedSelector, a pseudo-element only makes sense on the last
// selector, although the others still contribute to specificity.
func (c combinedSelector) PseudoElement() string {
if c.second == nil {
return ""
}
return c.second.PseudoElement()
}
// A SelectorGroup is a list of selectors, which matches if any of the
// individual selectors matches.
type SelectorGroup []Sel
// Match returns true if the node matches one of the single selectors.
func (s SelectorGroup) Match(n *html.Node) bool {
for _, sel := range s {
if sel.Match(n) {
return true
}
}
return false
}

120
vendor/github.com/andybalholm/cascadia/serialize.go generated vendored Normal file
View File

@@ -0,0 +1,120 @@
package cascadia
import (
"fmt"
"strings"
)
// implements the reverse operation Sel -> string
func (c tagSelector) String() string {
return c.tag
}
func (c idSelector) String() string {
return "#" + c.id
}
func (c classSelector) String() string {
return "." + c.class
}
func (c attrSelector) String() string {
val := c.val
if c.operation == "#=" {
val = c.regexp.String()
} else if c.operation != "" {
val = fmt.Sprintf(`"%s"`, val)
}
return fmt.Sprintf(`[%s%s%s]`, c.key, c.operation, val)
}
func (c relativePseudoClassSelector) String() string {
return fmt.Sprintf(":%s(%s)", c.name, c.match.String())
}
func (c containsPseudoClassSelector) String() string {
s := "contains"
if c.own {
s += "Own"
}
return fmt.Sprintf(`:%s("%s")`, s, c.value)
}
func (c regexpPseudoClassSelector) String() string {
s := "matches"
if c.own {
s += "Own"
}
return fmt.Sprintf(":%s(%s)", s, c.regexp.String())
}
func (c nthPseudoClassSelector) String() string {
if c.a == 0 && c.b == 1 { // special cases
s := ":first-"
if c.last {
s = ":last-"
}
if c.ofType {
s += "of-type"
} else {
s += "child"
}
return s
}
var name string
switch [2]bool{c.last, c.ofType} {
case [2]bool{true, true}:
name = "nth-last-of-type"
case [2]bool{true, false}:
name = "nth-last-child"
case [2]bool{false, true}:
name = "nth-of-type"
case [2]bool{false, false}:
name = "nth-child"
}
return fmt.Sprintf(":%s(%dn+%d)", name, c.a, c.b)
}
func (c onlyChildPseudoClassSelector) String() string {
if c.ofType {
return ":only-of-type"
}
return ":only-child"
}
func (c inputPseudoClassSelector) String() string {
return ":input"
}
func (c emptyElementPseudoClassSelector) String() string {
return ":empty"
}
func (c rootPseudoClassSelector) String() string {
return ":root"
}
func (c compoundSelector) String() string {
if len(c.selectors) == 0 && c.pseudoElement == "" {
return "*"
}
chunks := make([]string, len(c.selectors))
for i, sel := range c.selectors {
chunks[i] = sel.String()
}
s := strings.Join(chunks, "")
if c.pseudoElement != "" {
s += "::" + c.pseudoElement
}
return s
}
func (c combinedSelector) String() string {
start := c.first.String()
if c.second != nil {
start += fmt.Sprintf(" %s %s", string(c.combinator), c.second.String())
}
return start
}
func (c SelectorGroup) String() string {
ck := make([]string, len(c))
for i, s := range c {
ck[i] = s.String()
}
return strings.Join(ck, ", ")
}
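// Round-trip sketch (illustrative): parse a group and print it back.
//
//	g, _ := cascadia.ParseGroup("div.note, #main > p:nth-child(2n+1)")
//	fmt.Println(g.String()) // prints a CSS string equivalent to the input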

26
vendor/github.com/andybalholm/cascadia/specificity.go generated vendored Normal file
View File

@@ -0,0 +1,26 @@
package cascadia
// Specificity is the CSS specificity as defined in
// https://www.w3.org/TR/selectors/#specificity-rules
// with the convention Specificity = [A,B,C].
type Specificity [3]int
// returns `true` if s < other (strictly), false otherwise
func (s Specificity) Less(other Specificity) bool {
for i := range s {
if s[i] < other[i] {
return true
}
if s[i] > other[i] {
return false
}
}
return false
}
func (s Specificity) Add(other Specificity) Specificity {
for i, sp := range other {
s[i] += sp
}
return s
}
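// Worked example (illustrative): "#nav .item a" has specificity
// [1,0,0] + [0,1,0] + [0,0,1] = [1,1,1], which is Less than the [1,2,0] of
// "#nav .item.active" because the first differing component (B) is smaller.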

12
vendor/github.com/cheggaaa/pb/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,12 @@
Copyright (c) 2012-2015, Sergey Cherepanov
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

129
vendor/github.com/cheggaaa/pb/README.md generated vendored Normal file
View File

@@ -0,0 +1,129 @@
# Terminal progress bar for Go
[![Coverage Status](https://coveralls.io/repos/github/cheggaaa/pb/badge.svg)](https://coveralls.io/github/cheggaaa/pb)
## Installation
```
go get github.com/cheggaaa/pb/v3
```
Documentation for the v1 bar is available [here](README_V1.md)
## Quick start
```Go
package main
import (
"time"
"github.com/cheggaaa/pb/v3"
)
func main() {
count := 100000
// create and start new bar
bar := pb.StartNew(count)
// start bar from 'default' template
// bar := pb.Default.Start(count)
// start bar from 'simple' template
// bar := pb.Simple.Start(count)
// start bar from 'full' template
// bar := pb.Full.Start(count)
for i := 0; i < count; i++ {
bar.Increment()
time.Sleep(time.Millisecond)
}
bar.Finish()
}
```
The result will look like this:
```
> go run test.go
37158 / 100000 [================>_______________________________] 37.16% 1m11s
```
## Settings
```Go
// create bar
bar := pb.New(count)
// refresh info every second (default 200ms)
bar.SetRefreshRate(time.Second)
// force set io.Writer, by default it's os.Stderr
bar.SetWriter(os.Stdout)
// bar will format numbers as bytes (B, KiB, MiB, etc)
bar.Set(pb.Byte, true)
// bar use SI bytes prefix names (B, kB) instead of IEC (B, KiB)
bar.Set(pb.SIBytesPrefix, true)
// set custom bar template
bar.SetTemplateString(myTemplate)
// check for error after template set
if err = bar.Err(); err != nil {
return
}
// start bar
bar.Start()
```
## Progress bar for IO Operations
```go
package main
import (
"crypto/rand"
"io"
"io/ioutil"
"github.com/cheggaaa/pb/v3"
)
func main() {
var limit int64 = 1024 * 1024 * 500
// we will copy 500 MiB from crypto/rand to ioutil.Discard
reader := io.LimitReader(rand.Reader, limit)
writer := ioutil.Discard
// start new bar
bar := pb.Full.Start64(limit)
// create proxy reader
barReader := bar.NewProxyReader(reader)
// copy from proxy reader
io.Copy(writer, barReader)
// finish bar
bar.Finish()
}
```
## Custom Progress Bar templates
Rendering is based on the builtin text/template package. You can use pb's existing elements or create your own.
All available elements are described in the element.go file.
#### All in one example:
```go
tmpl := `{{ red "With funcs:" }} {{ bar . "<" "-" (cycle . "↖" "↗" "↘" "↙" ) "." ">"}} {{speed . | rndcolor }} {{percent .}} {{string . "my_green_string" | green}} {{string . "my_blue_string" | blue}}`
// start bar based on our template
bar := pb.ProgressBarTemplate(tmpl).Start64(limit)
// set values for string elements
bar.Set("my_green_string", "green").
Set("my_blue_string", "blue")
```

175
vendor/github.com/cheggaaa/pb/README_V1.md generated vendored Normal file
View File

@@ -0,0 +1,175 @@
# Terminal progress bar for Go
Simple progress bar for console programs.
## Installation
```
go get github.com/cheggaaa/pb
```
## Usage
```Go
package main
import (
"github.com/cheggaaa/pb"
"time"
)
func main() {
count := 100000
bar := pb.StartNew(count)
for i := 0; i < count; i++ {
bar.Increment()
time.Sleep(time.Millisecond)
}
bar.FinishPrint("The End!")
}
```
The result will look like this:
```
> go run test.go
37158 / 100000 [================>_______________________________] 37.16% 1m11s
```
## Customization
```Go
// create bar
bar := pb.New(count)
// refresh info every second (default 200ms)
bar.SetRefreshRate(time.Second)
// show percents (by default already true)
bar.ShowPercent = true
// show bar (by default already true)
bar.ShowBar = true
// no counters
bar.ShowCounters = false
// show "time left"
bar.ShowTimeLeft = true
// show average speed
bar.ShowSpeed = true
// sets the width of the progress bar
bar.SetWidth(80)
// sets the width of the progress bar, but it will be ignored if the terminal is smaller
bar.SetMaxWidth(80)
// convert output to readable format (like KB, MB)
bar.SetUnits(pb.U_BYTES)
// and start
bar.Start()
```
## Progress bar for IO Operations
```go
// create and start bar
bar := pb.New(myDataLen).SetUnits(pb.U_BYTES)
bar.Start()
// my io.Reader
r := myReader
// my io.Writer
w := myWriter
// create proxy reader
reader := bar.NewProxyReader(r)
// and copy from pb reader
io.Copy(w, reader)
```
```go
// create and start bar
bar := pb.New(myDataLen).SetUnits(pb.U_BYTES)
bar.Start()
// my io.Reader
r := myReader
// my io.Writer
w := myWriter
// create multi writer
writer := io.MultiWriter(w, bar)
// and copy
io.Copy(writer, r)
bar.Finish()
```
## Custom Progress Bar Look-and-feel
```go
bar.Format("<.- >")
```
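The five characters are, in order: bar start, filled section, leading tip, empty section, and bar end (this follows from the `Format` implementation in pb.go). Multi-byte runes can be supplied by separating the five parts with `\x00`:
```go
bar.Format("[\x00=\x00>\x00-\x00]") // start, fill, tip, empty, end
```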
## Multiple Progress Bars (experimental and unstable)
Do not print to terminal while pool is active.
```go
package main
import (
"math/rand"
"sync"
"time"
"github.com/cheggaaa/pb"
)
func main() {
// create bars
first := pb.New(200).Prefix("First ")
second := pb.New(200).Prefix("Second ")
third := pb.New(200).Prefix("Third ")
// start pool
pool, err := pb.StartPool(first, second, third)
if err != nil {
panic(err)
}
// update bars
wg := new(sync.WaitGroup)
for _, bar := range []*pb.ProgressBar{first, second, third} {
wg.Add(1)
go func(cb *pb.ProgressBar) {
for n := 0; n < 200; n++ {
cb.Increment()
time.Sleep(time.Millisecond * time.Duration(rand.Intn(100)))
}
cb.Finish()
wg.Done()
}(bar)
}
wg.Wait()
// close pool
pool.Stop()
}
```
The result will be as follows:
```
$ go run example/multiple.go
First 34 / 200 [=========>---------------------------------------------] 17.00% 00m08s
Second 42 / 200 [===========>------------------------------------------] 21.00% 00m06s
Third 36 / 200 [=========>---------------------------------------------] 18.00% 00m08s
```

125
vendor/github.com/cheggaaa/pb/format.go generated vendored Normal file
View File

@@ -0,0 +1,125 @@
package pb
import (
"fmt"
"time"
)
type Units int
const (
// U_NO are default units, they represent a simple value and are not formatted at all.
U_NO Units = iota
// U_BYTES units are formatted in a human readable way (B, KiB, MiB, ...)
U_BYTES
// U_BYTES_DEC units are like U_BYTES, but base 10 (B, KB, MB, ...)
U_BYTES_DEC
// U_DURATION units are formatted in a human readable way (3h14m15s)
U_DURATION
)
const (
KiB = 1024
MiB = 1048576
GiB = 1073741824
TiB = 1099511627776
KB = 1e3
MB = 1e6
GB = 1e9
TB = 1e12
)
func Format(i int64) *formatter {
return &formatter{n: i}
}
type formatter struct {
n int64
unit Units
width int
perSec bool
}
func (f *formatter) To(unit Units) *formatter {
f.unit = unit
return f
}
func (f *formatter) Width(width int) *formatter {
f.width = width
return f
}
func (f *formatter) PerSec() *formatter {
f.perSec = true
return f
}
func (f *formatter) String() (out string) {
switch f.unit {
case U_BYTES:
out = formatBytes(f.n)
case U_BYTES_DEC:
out = formatBytesDec(f.n)
case U_DURATION:
out = formatDuration(f.n)
default:
out = fmt.Sprintf(fmt.Sprintf("%%%dd", f.width), f.n)
}
if f.perSec {
out += "/s"
}
return
}
// Convert bytes to human readable string. Like 2 MiB, 64.2 KiB, 52 B
func formatBytes(i int64) (result string) {
switch {
case i >= TiB:
result = fmt.Sprintf("%.02f TiB", float64(i)/TiB)
case i >= GiB:
result = fmt.Sprintf("%.02f GiB", float64(i)/GiB)
case i >= MiB:
result = fmt.Sprintf("%.02f MiB", float64(i)/MiB)
case i >= KiB:
result = fmt.Sprintf("%.02f KiB", float64(i)/KiB)
default:
result = fmt.Sprintf("%d B", i)
}
return
}
// Convert bytes to base-10 human readable string. Like 2 MB, 64.2 KB, 52 B
func formatBytesDec(i int64) (result string) {
switch {
case i >= TB:
result = fmt.Sprintf("%.02f TB", float64(i)/TB)
case i >= GB:
result = fmt.Sprintf("%.02f GB", float64(i)/GB)
case i >= MB:
result = fmt.Sprintf("%.02f MB", float64(i)/MB)
case i >= KB:
result = fmt.Sprintf("%.02f KB", float64(i)/KB)
default:
result = fmt.Sprintf("%d B", i)
}
return
}
func formatDuration(n int64) (result string) {
d := time.Duration(n)
if d > time.Hour*24 {
result = fmt.Sprintf("%dd", d/24/time.Hour)
d -= (d / time.Hour / 24) * (time.Hour * 24)
}
if d > time.Hour {
result = fmt.Sprintf("%s%dh", result, d/time.Hour)
d -= d / time.Hour * time.Hour
}
m := d / time.Minute
d -= m * time.Minute
s := d / time.Second
result = fmt.Sprintf("%s%02dm%02ds", result, m, s)
return
}
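// Usage sketch (illustrative values):
//
//	Format(1536).To(U_BYTES).String()                        // "1.50 KiB"
//	Format(2500000).To(U_BYTES_DEC).String()                 // "2.50 MB"
//	Format(int64(90 * time.Second)).To(U_DURATION).String()  // "01m30s"
//	Format(2048).To(U_BYTES).PerSec().String()               // "2.00 KiB/s"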

8
vendor/github.com/cheggaaa/pb/go.mod generated vendored Normal file
View File

@@ -0,0 +1,8 @@
module github.com/cheggaaa/pb
require (
github.com/fatih/color v1.7.0
github.com/mattn/go-colorable v0.1.2
github.com/mattn/go-runewidth v0.0.4
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb
)

11
vendor/github.com/cheggaaa/pb/go.sum generated vendored Normal file
View File

@@ -0,0 +1,11 @@
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb h1:fgwFCsaw9buMuxNd6+DQfAuSFqbNiQZpcgJQAgJsK6k=
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

507
vendor/github.com/cheggaaa/pb/pb.go generated vendored Normal file
View File

@@ -0,0 +1,507 @@
// Simple console progress bars
package pb
import (
"fmt"
"io"
"math"
"strings"
"sync"
"sync/atomic"
"time"
"unicode/utf8"
)
// Current version
const Version = "1.0.28"
const (
// Default refresh rate - 200ms
DEFAULT_REFRESH_RATE = time.Millisecond * 200
FORMAT = "[=>-]"
)
// DEPRECATED
// Variables kept for backward compatibility; they no longer have any effect.
// Use pb.Format and pb.SetRefreshRate instead.
var (
DefaultRefreshRate = DEFAULT_REFRESH_RATE
BarStart, BarEnd, Empty, Current, CurrentN string
)
// Create new progress bar object
func New(total int) *ProgressBar {
return New64(int64(total))
}
// Create new progress bar object using int64 as total
func New64(total int64) *ProgressBar {
pb := &ProgressBar{
Total: total,
RefreshRate: DEFAULT_REFRESH_RATE,
ShowPercent: true,
ShowCounters: true,
ShowBar: true,
ShowTimeLeft: true,
ShowElapsedTime: false,
ShowFinalTime: true,
Units: U_NO,
ManualUpdate: false,
finish: make(chan struct{}),
}
return pb.Format(FORMAT)
}
// Create new object and start
func StartNew(total int) *ProgressBar {
return New(total).Start()
}
// Callback for custom output
// For example:
// bar.Callback = func(s string) {
// mySuperPrint(s)
// }
//
type Callback func(out string)
type ProgressBar struct {
current int64 // current must be first member of struct (https://code.google.com/p/go/issues/detail?id=5278)
previous int64
Total int64
RefreshRate time.Duration
ShowPercent, ShowCounters bool
ShowSpeed, ShowTimeLeft, ShowBar bool
ShowFinalTime, ShowElapsedTime bool
Output io.Writer
Callback Callback
NotPrint bool
Units Units
Width int
ForceWidth bool
ManualUpdate bool
AutoStat bool
// Default width for the time box.
UnitsWidth int
TimeBoxWidth int
finishOnce sync.Once //Guards isFinish
finish chan struct{}
isFinish bool
startTime time.Time
startValue int64
changeTime time.Time
prefix, postfix string
mu sync.Mutex
lastPrint string
BarStart string
BarEnd string
Empty string
Current string
CurrentN string
AlwaysUpdate bool
}
// Start print
func (pb *ProgressBar) Start() *ProgressBar {
pb.startTime = time.Now()
pb.startValue = atomic.LoadInt64(&pb.current)
if atomic.LoadInt64(&pb.Total) == 0 {
pb.ShowTimeLeft = false
pb.ShowPercent = false
pb.AutoStat = false
}
if !pb.ManualUpdate {
pb.Update() // Initial printing of the bar before running the bar refresher.
go pb.refresher()
}
return pb
}
// Increment current value
func (pb *ProgressBar) Increment() int {
return pb.Add(1)
}
// Get current value
func (pb *ProgressBar) Get() int64 {
c := atomic.LoadInt64(&pb.current)
return c
}
// Set current value
func (pb *ProgressBar) Set(current int) *ProgressBar {
return pb.Set64(int64(current))
}
// Set64 sets the current value as int64
func (pb *ProgressBar) Set64(current int64) *ProgressBar {
atomic.StoreInt64(&pb.current, current)
return pb
}
// Add to current value
func (pb *ProgressBar) Add(add int) int {
return int(pb.Add64(int64(add)))
}
func (pb *ProgressBar) Add64(add int64) int64 {
return atomic.AddInt64(&pb.current, add)
}
// Set prefix string
func (pb *ProgressBar) Prefix(prefix string) *ProgressBar {
pb.mu.Lock()
defer pb.mu.Unlock()
pb.prefix = prefix
return pb
}
// Set postfix string
func (pb *ProgressBar) Postfix(postfix string) *ProgressBar {
pb.mu.Lock()
defer pb.mu.Unlock()
pb.postfix = postfix
return pb
}
// Set custom format for bar
// Example: bar.Format("[=>_]")
// Example: bar.Format("[\x00=\x00>\x00-\x00]") // \x00 is the delimiter
func (pb *ProgressBar) Format(format string) *ProgressBar {
var formatEntries []string
if utf8.RuneCountInString(format) == 5 {
formatEntries = strings.Split(format, "")
} else {
formatEntries = strings.Split(format, "\x00")
}
if len(formatEntries) == 5 {
pb.BarStart = formatEntries[0]
pb.BarEnd = formatEntries[4]
pb.Empty = formatEntries[3]
pb.Current = formatEntries[1]
pb.CurrentN = formatEntries[2]
}
return pb
}
// Set bar refresh rate
func (pb *ProgressBar) SetRefreshRate(rate time.Duration) *ProgressBar {
pb.RefreshRate = rate
return pb
}
// Set units
// bar.SetUnits(U_NO) - by default
// bar.SetUnits(U_BYTES) - for Mb, Kb, etc
func (pb *ProgressBar) SetUnits(units Units) *ProgressBar {
pb.Units = units
return pb
}
// Set max width; if it is bigger than the terminal width, it will be ignored
func (pb *ProgressBar) SetMaxWidth(width int) *ProgressBar {
pb.Width = width
pb.ForceWidth = false
return pb
}
// Set bar width
func (pb *ProgressBar) SetWidth(width int) *ProgressBar {
pb.Width = width
pb.ForceWidth = true
return pb
}
// End print
func (pb *ProgressBar) Finish() {
// Protect against multiple calls
pb.finishOnce.Do(func() {
close(pb.finish)
pb.write(atomic.LoadInt64(&pb.Total), atomic.LoadInt64(&pb.current))
pb.mu.Lock()
defer pb.mu.Unlock()
switch {
case pb.Output != nil:
fmt.Fprintln(pb.Output)
case !pb.NotPrint:
fmt.Println()
}
pb.isFinish = true
})
}
// IsFinished reports whether the bar has finished
func (pb *ProgressBar) IsFinished() bool {
pb.mu.Lock()
defer pb.mu.Unlock()
return pb.isFinish
}
// End print and write string 'str'
func (pb *ProgressBar) FinishPrint(str string) {
pb.Finish()
if pb.Output != nil {
fmt.Fprintln(pb.Output, str)
} else {
fmt.Println(str)
}
}
// implement io.Writer
func (pb *ProgressBar) Write(p []byte) (n int, err error) {
n = len(p)
pb.Add(n)
return
}
// implement io.Reader
func (pb *ProgressBar) Read(p []byte) (n int, err error) {
n = len(p)
pb.Add(n)
return
}
// Create new proxy reader over bar
// Takes io.Reader or io.ReadCloser
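// A usage sketch (illustrative; size, resp and dst are assumed to already
// exist in the caller):
//
//	bar := pb.New64(size).SetUnits(pb.U_BYTES).Start()
//	proxy := bar.NewProxyReader(resp.Body)
//	io.Copy(dst, proxy) // the bar advances as bytes are read
//	bar.Finish()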
func (pb *ProgressBar) NewProxyReader(r io.Reader) *Reader {
return &Reader{r, pb}
}
// Create new proxy writer over bar
// Takes io.Writer or io.WriteCloser
func (pb *ProgressBar) NewProxyWriter(r io.Writer) *Writer {
return &Writer{r, pb}
}
func (pb *ProgressBar) write(total, current int64) {
pb.mu.Lock()
defer pb.mu.Unlock()
width := pb.GetWidth()
var percentBox, countersBox, timeLeftBox, timeSpentBox, speedBox, barBox, end, out string
// percents
if pb.ShowPercent {
var percent float64
if total > 0 {
percent = float64(current) / (float64(total) / float64(100))
} else {
percent = float64(current) / float64(100)
}
percentBox = fmt.Sprintf(" %6.02f%%", percent)
}
// counters
if pb.ShowCounters {
current := Format(current).To(pb.Units).Width(pb.UnitsWidth)
if total > 0 {
totalS := Format(total).To(pb.Units).Width(pb.UnitsWidth)
countersBox = fmt.Sprintf(" %s / %s ", current, totalS)
} else {
countersBox = fmt.Sprintf(" %s / ? ", current)
}
}
// time left
currentFromStart := current - pb.startValue
fromStart := time.Now().Sub(pb.startTime)
lastChangeTime := pb.changeTime
fromChange := lastChangeTime.Sub(pb.startTime)
if pb.ShowElapsedTime {
timeSpentBox = fmt.Sprintf(" %s ", (fromStart/time.Second)*time.Second)
}
select {
case <-pb.finish:
if pb.ShowFinalTime {
var left time.Duration
left = (fromStart / time.Second) * time.Second
timeLeftBox = fmt.Sprintf(" %s", left.String())
}
default:
if pb.ShowTimeLeft && currentFromStart > 0 {
perEntry := fromChange / time.Duration(currentFromStart)
var left time.Duration
if total > 0 {
left = time.Duration(total-current) * perEntry
left -= time.Since(lastChangeTime)
left = (left / time.Second) * time.Second
}
if left > 0 {
timeLeft := Format(int64(left)).To(U_DURATION).String()
timeLeftBox = fmt.Sprintf(" %s", timeLeft)
}
}
}
if len(timeLeftBox) < pb.TimeBoxWidth {
timeLeftBox = fmt.Sprintf("%s%s", strings.Repeat(" ", pb.TimeBoxWidth-len(timeLeftBox)), timeLeftBox)
}
// speed
if pb.ShowSpeed && currentFromStart > 0 {
fromStart := time.Now().Sub(pb.startTime)
speed := float64(currentFromStart) / (float64(fromStart) / float64(time.Second))
speedBox = " " + Format(int64(speed)).To(pb.Units).Width(pb.UnitsWidth).PerSec().String()
}
barWidth := escapeAwareRuneCountInString(countersBox + pb.BarStart + pb.BarEnd + percentBox + timeSpentBox + timeLeftBox + speedBox + pb.prefix + pb.postfix)
// bar
if pb.ShowBar {
size := width - barWidth
if size > 0 {
if total > 0 {
curSize := int(math.Ceil((float64(current) / float64(total)) * float64(size)))
emptySize := size - curSize
barBox = pb.BarStart
if emptySize < 0 {
emptySize = 0
}
if curSize > size {
curSize = size
}
cursorLen := escapeAwareRuneCountInString(pb.Current)
if emptySize <= 0 {
barBox += strings.Repeat(pb.Current, curSize/cursorLen)
} else if curSize > 0 {
cursorEndLen := escapeAwareRuneCountInString(pb.CurrentN)
cursorRepetitions := (curSize - cursorEndLen) / cursorLen
barBox += strings.Repeat(pb.Current, cursorRepetitions)
barBox += pb.CurrentN
}
emptyLen := escapeAwareRuneCountInString(pb.Empty)
barBox += strings.Repeat(pb.Empty, emptySize/emptyLen)
barBox += pb.BarEnd
} else {
pos := size - int(current)%int(size)
barBox = pb.BarStart
if pos-1 > 0 {
barBox += strings.Repeat(pb.Empty, pos-1)
}
barBox += pb.Current
if size-pos-1 > 0 {
barBox += strings.Repeat(pb.Empty, size-pos-1)
}
barBox += pb.BarEnd
}
}
}
// check len
out = pb.prefix + timeSpentBox + countersBox + barBox + percentBox + speedBox + timeLeftBox + pb.postfix
if cl := escapeAwareRuneCountInString(out); cl < width {
end = strings.Repeat(" ", width-cl)
}
// and print!
pb.lastPrint = out + end
isFinish := pb.isFinish
switch {
case isFinish:
return
case pb.Output != nil:
fmt.Fprint(pb.Output, "\r"+out+end)
case pb.Callback != nil:
pb.Callback(out + end)
case !pb.NotPrint:
fmt.Print("\r" + out + end)
}
}
// GetTerminalWidth - returns terminal width for all platforms.
func GetTerminalWidth() (int, error) {
return terminalWidth()
}
func (pb *ProgressBar) GetWidth() int {
if pb.ForceWidth {
return pb.Width
}
width := pb.Width
termWidth, _ := terminalWidth()
if width == 0 || termWidth <= width {
width = termWidth
}
return width
}
// Write the current state of the progressbar
func (pb *ProgressBar) Update() {
c := atomic.LoadInt64(&pb.current)
p := atomic.LoadInt64(&pb.previous)
t := atomic.LoadInt64(&pb.Total)
if p != c {
pb.mu.Lock()
pb.changeTime = time.Now()
pb.mu.Unlock()
atomic.StoreInt64(&pb.previous, c)
}
pb.write(t, c)
if pb.AutoStat {
if c == 0 {
pb.startTime = time.Now()
pb.startValue = 0
} else if c >= t && pb.isFinish != true {
pb.Finish()
}
}
}
// String return the last bar print
func (pb *ProgressBar) String() string {
pb.mu.Lock()
defer pb.mu.Unlock()
return pb.lastPrint
}
// SetTotal atomically sets new total count
func (pb *ProgressBar) SetTotal(total int) *ProgressBar {
return pb.SetTotal64(int64(total))
}
// SetTotal64 atomically sets new total count
func (pb *ProgressBar) SetTotal64(total int64) *ProgressBar {
atomic.StoreInt64(&pb.Total, total)
return pb
}
// Reset bar and set new total count
// Does effect only on finished bar
func (pb *ProgressBar) Reset(total int) *ProgressBar {
pb.mu.Lock()
defer pb.mu.Unlock()
if pb.isFinish {
pb.SetTotal(total).Set(0)
atomic.StoreInt64(&pb.previous, 0)
}
return pb
}
// Internal loop for refreshing the progressbar
func (pb *ProgressBar) refresher() {
for {
select {
case <-pb.finish:
return
case <-time.After(pb.RefreshRate):
pb.Update()
}
}
}

11
vendor/github.com/cheggaaa/pb/pb_appengine.go generated vendored Normal file
View File

@@ -0,0 +1,11 @@
// +build appengine js
package pb
import "errors"
// terminalWidth returns the width of the terminal, which is not supported
// and should always fail on App Engine classic, which is a sandboxed PaaS.
func terminalWidth() (int, error) {
return 0, errors.New("Not supported")
}

143
vendor/github.com/cheggaaa/pb/pb_win.go generated vendored Normal file
View File

@@ -0,0 +1,143 @@
// +build windows
package pb
import (
"errors"
"fmt"
"os"
"sync"
"syscall"
"unsafe"
)
var tty = os.Stdin
var (
kernel32 = syscall.NewLazyDLL("kernel32.dll")
// GetConsoleScreenBufferInfo retrieves information about the
// specified console screen buffer.
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683171(v=vs.85).aspx
procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
// GetConsoleMode retrieves the current input mode of a console's
// input buffer or the current output mode of a console screen buffer.
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms683167(v=vs.85).aspx
getConsoleMode = kernel32.NewProc("GetConsoleMode")
// SetConsoleMode sets the input mode of a console's input buffer
// or the output mode of a console screen buffer.
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms686033(v=vs.85).aspx
setConsoleMode = kernel32.NewProc("SetConsoleMode")
// SetConsoleCursorPosition sets the cursor position in the
// specified console screen buffer.
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms686025(v=vs.85).aspx
setConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
)
type (
// Defines the coordinates of the upper left and lower right corners
// of a rectangle.
// See
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms686311(v=vs.85).aspx
smallRect struct {
Left, Top, Right, Bottom int16
}
// Defines the coordinates of a character cell in a console screen
// buffer. The origin of the coordinate system (0,0) is at the top, left cell
// of the buffer.
// See
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms682119(v=vs.85).aspx
coordinates struct {
X, Y int16
}
word int16
// Contains information about a console screen buffer.
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms682093(v=vs.85).aspx
consoleScreenBufferInfo struct {
dwSize coordinates
dwCursorPosition coordinates
wAttributes word
srWindow smallRect
dwMaximumWindowSize coordinates
}
)
// terminalWidth returns width of the terminal.
func terminalWidth() (width int, err error) {
var info consoleScreenBufferInfo
_, _, e := syscall.Syscall(procGetConsoleScreenBufferInfo.Addr(), 2, uintptr(syscall.Stdout), uintptr(unsafe.Pointer(&info)), 0)
if e != 0 {
return 0, error(e)
}
return int(info.dwSize.X) - 1, nil
}
func getCursorPos() (pos coordinates, err error) {
var info consoleScreenBufferInfo
_, _, e := syscall.Syscall(procGetConsoleScreenBufferInfo.Addr(), 2, uintptr(syscall.Stdout), uintptr(unsafe.Pointer(&info)), 0)
if e != 0 {
return info.dwCursorPosition, error(e)
}
return info.dwCursorPosition, nil
}
func setCursorPos(pos coordinates) error {
_, _, e := syscall.Syscall(setConsoleCursorPosition.Addr(), 2, uintptr(syscall.Stdout), uintptr(uint32(uint16(pos.Y))<<16|uint32(uint16(pos.X))), 0)
if e != 0 {
return error(e)
}
return nil
}
var ErrPoolWasStarted = errors.New("Bar pool was started")
var echoLocked bool
var echoLockMutex sync.Mutex
var oldState word
func lockEcho() (shutdownCh chan struct{}, err error) {
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
if echoLocked {
err = ErrPoolWasStarted
return
}
echoLocked = true
if _, _, e := syscall.Syscall(getConsoleMode.Addr(), 2, uintptr(syscall.Stdout), uintptr(unsafe.Pointer(&oldState)), 0); e != 0 {
err = fmt.Errorf("Can't get terminal settings: %v", e)
return
}
newState := oldState
const ENABLE_ECHO_INPUT = 0x0004
const ENABLE_LINE_INPUT = 0x0002
newState = newState & (^(ENABLE_LINE_INPUT | ENABLE_ECHO_INPUT))
if _, _, e := syscall.Syscall(setConsoleMode.Addr(), 2, uintptr(syscall.Stdout), uintptr(newState), 0); e != 0 {
err = fmt.Errorf("Can't set terminal settings: %v", e)
return
}
shutdownCh = make(chan struct{})
return
}
func unlockEcho() (err error) {
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
if !echoLocked {
return
}
echoLocked = false
if _, _, e := syscall.Syscall(setConsoleMode.Addr(), 2, uintptr(syscall.Stdout), uintptr(oldState), 0); e != 0 {
err = fmt.Errorf("Can't set terminal settings")
}
return
}

118
vendor/github.com/cheggaaa/pb/pb_x.go generated vendored Normal file
View File

@ -0,0 +1,118 @@
// +build linux darwin freebsd netbsd openbsd solaris dragonfly
// +build !appengine !js
package pb
import (
"errors"
"fmt"
"os"
"os/signal"
"sync"
"syscall"
"golang.org/x/sys/unix"
)
var ErrPoolWasStarted = errors.New("Bar pool was started")
var (
echoLockMutex sync.Mutex
origTermStatePtr *unix.Termios
tty *os.File
istty bool
)
func init() {
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
var err error
tty, err = os.Open("/dev/tty")
istty = true
if err != nil {
tty = os.Stdin
istty = false
}
}
// terminalWidth returns width of the terminal.
func terminalWidth() (int, error) {
if !istty {
return 0, errors.New("Not Supported")
}
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
fd := int(tty.Fd())
ws, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ)
if err != nil {
return 0, err
}
return int(ws.Col), nil
}
func lockEcho() (shutdownCh chan struct{}, err error) {
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
if istty {
if origTermStatePtr != nil {
return shutdownCh, ErrPoolWasStarted
}
fd := int(tty.Fd())
origTermStatePtr, err = unix.IoctlGetTermios(fd, ioctlReadTermios)
if err != nil {
return nil, fmt.Errorf("Can't get terminal settings: %v", err)
}
oldTermios := *origTermStatePtr
newTermios := oldTermios
newTermios.Lflag &^= syscall.ECHO
newTermios.Lflag |= syscall.ICANON | syscall.ISIG
newTermios.Iflag |= syscall.ICRNL
if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, &newTermios); err != nil {
return nil, fmt.Errorf("Can't set terminal settings: %v", err)
}
}
shutdownCh = make(chan struct{})
go catchTerminate(shutdownCh)
return
}
func unlockEcho() error {
echoLockMutex.Lock()
defer echoLockMutex.Unlock()
if istty {
if origTermStatePtr == nil {
return nil
}
fd := int(tty.Fd())
if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, origTermStatePtr); err != nil {
return fmt.Errorf("Can't set terminal settings: %v", err)
}
}
origTermStatePtr = nil
return nil
}
// catchTerminate listens for exit signals and restores the terminal state
func catchTerminate(shutdownCh chan struct{}) {
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGKILL)
defer signal.Stop(sig)
select {
case <-shutdownCh:
unlockEcho()
case <-sig:
unlockEcho()
}
}

104
vendor/github.com/cheggaaa/pb/pool.go generated vendored Normal file
View File

@ -0,0 +1,104 @@
// +build linux darwin freebsd netbsd openbsd solaris dragonfly windows
package pb
import (
"io"
"sync"
"time"
)
// StartPool creates and starts a new pool with the given bars.
// You need to call pool.Stop() when the work is done.
func StartPool(pbs ...*ProgressBar) (pool *Pool, err error) {
pool = new(Pool)
if err = pool.Start(); err != nil {
return
}
pool.Add(pbs...)
return
}
// NewPool initialises a pool with progress bars, but
// doesn't start it. You need to call Start manually
func NewPool(pbs ...*ProgressBar) (pool *Pool) {
pool = new(Pool)
pool.Add(pbs...)
return
}
type Pool struct {
Output io.Writer
RefreshRate time.Duration
bars []*ProgressBar
lastBarsCount int
shutdownCh chan struct{}
workerCh chan struct{}
m sync.Mutex
finishOnce sync.Once
}
// Add progress bars.
func (p *Pool) Add(pbs ...*ProgressBar) {
p.m.Lock()
defer p.m.Unlock()
for _, bar := range pbs {
bar.ManualUpdate = true
bar.NotPrint = true
bar.Start()
p.bars = append(p.bars, bar)
}
}
func (p *Pool) Start() (err error) {
p.RefreshRate = DefaultRefreshRate
p.shutdownCh, err = lockEcho()
if err != nil {
return
}
p.workerCh = make(chan struct{})
go p.writer()
return
}
func (p *Pool) writer() {
var first = true
defer func() {
if !first {
p.print(false)
} else {
p.print(true)
p.print(false)
}
close(p.workerCh)
}()
for {
select {
case <-time.After(p.RefreshRate):
if p.print(first) {
p.print(false)
return
}
first = false
case <-p.shutdownCh:
return
}
}
}
// Restore terminal state and close pool
func (p *Pool) Stop() error {
p.finishOnce.Do(func() {
if p.shutdownCh != nil {
close(p.shutdownCh)
}
})
// Wait for the worker to complete
select {
case <-p.workerCh:
}
return unlockEcho()
}
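StartPool, Add, and Stop above form the pool API: Add switches each bar into manual-update mode, the writer goroutine repaints the whole group every RefreshRate, and Stop restores the terminal. A minimal sketch of running two bars through a pool, again assuming the package's `New` constructor (not part of this hunk):

```go
// Minimal multi-bar sketch (assumes pb.New from the package's public API).
package main

import (
	"time"

	"github.com/cheggaaa/pb"
)

func main() {
	first := pb.New(100)
	second := pb.New(200)

	pool, err := pb.StartPool(first, second) // locks echo and starts the writer loop
	if err != nil {
		panic(err)
	}

	for i := 0; i < 100; i++ {
		first.Add(1)
		second.Add(2)
		time.Sleep(10 * time.Millisecond)
	}
	first.Finish()
	second.Finish()

	if err := pool.Stop(); err != nil { // restores the terminal, waits for the worker
		panic(err)
	}
}
```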

45
vendor/github.com/cheggaaa/pb/pool_win.go generated vendored Normal file
View File

@ -0,0 +1,45 @@
// +build windows
package pb
import (
"fmt"
"log"
)
func (p *Pool) print(first bool) bool {
p.m.Lock()
defer p.m.Unlock()
var out string
if !first {
coords, err := getCursorPos()
if err != nil {
log.Panic(err)
}
coords.Y -= int16(p.lastBarsCount)
if coords.Y < 0 {
coords.Y = 0
}
coords.X = 0
err = setCursorPos(coords)
if err != nil {
log.Panic(err)
}
}
isFinished := true
for _, bar := range p.bars {
if !bar.IsFinished() {
isFinished = false
}
bar.Update()
out += fmt.Sprintf("\r%s\n", bar.String())
}
if p.Output != nil {
fmt.Fprint(p.Output, out)
} else {
fmt.Print(out)
}
p.lastBarsCount = len(p.bars)
return isFinished
}

29
vendor/github.com/cheggaaa/pb/pool_x.go generated vendored Normal file
View File

@ -0,0 +1,29 @@
// +build linux darwin freebsd netbsd openbsd solaris dragonfly
package pb
import "fmt"
func (p *Pool) print(first bool) bool {
p.m.Lock()
defer p.m.Unlock()
var out string
if !first {
out = fmt.Sprintf("\033[%dA", p.lastBarsCount)
}
isFinished := true
for _, bar := range p.bars {
if !bar.IsFinished() {
isFinished = false
}
bar.Update()
out += fmt.Sprintf("\r%s\n", bar.String())
}
if p.Output != nil {
fmt.Fprint(p.Output, out)
} else {
fmt.Print(out)
}
p.lastBarsCount = len(p.bars)
return isFinished
}

26
vendor/github.com/cheggaaa/pb/reader.go generated vendored Normal file
View File

@ -0,0 +1,26 @@
package pb
import (
"io"
)
// Reader is a proxy reader that implements io.Reader
type Reader struct {
io.Reader
bar *ProgressBar
}
func (r *Reader) Read(p []byte) (n int, err error) {
n, err = r.Reader.Read(p)
r.bar.Add(n)
return
}
// Close finishes the bar and closes the wrapped reader when it implements io.Closer
func (r *Reader) Close() (err error) {
r.bar.Finish()
if closer, ok := r.Reader.(io.Closer); ok {
return closer.Close()
}
return
}
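The proxy Reader above advances the bar by however many bytes each Read returns, so wrapping any io.Reader gives byte-level progress for free. A hedged sketch of tracking an io.Copy, assuming the package's `New64` and `NewProxyReader` helpers defined elsewhere in pb.go (not in this hunk); the file name is illustrative.

```go
// Progress-tracked copy sketch (assumes pb.New64 and (*ProgressBar).NewProxyReader).
package main

import (
	"io"
	"io/ioutil"
	"os"

	"github.com/cheggaaa/pb"
)

func main() {
	f, err := os.Open("input.bin") // hypothetical input file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		panic(err)
	}

	bar := pb.New64(info.Size())
	bar.Start()

	reader := bar.NewProxyReader(f) // every Read call advances the bar via Add
	if _, err := io.Copy(ioutil.Discard, reader); err != nil {
		panic(err)
	}
	bar.Finish()
}
```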

17
vendor/github.com/cheggaaa/pb/runecount.go generated vendored Normal file
View File

@ -0,0 +1,17 @@
package pb
import (
"github.com/mattn/go-runewidth"
"regexp"
)
// Finds the control character sequences (like colors)
var ctrlFinder = regexp.MustCompile("\x1b\x5b[0-9]+\x6d")
func escapeAwareRuneCountInString(s string) int {
n := runewidth.StringWidth(s)
for _, sm := range ctrlFinder.FindAllString(s, -1) {
n -= runewidth.StringWidth(sm)
}
return n
}
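escapeAwareRuneCountInString computes the visible width of a string by subtracting the width of any SGR sequences matched by ctrlFinder from the runewidth total. The vendored function is unexported, so the standalone sketch below simply mirrors the same idea:

```go
// Standalone sketch mirroring runecount.go: visible width without color escapes.
package main

import (
	"fmt"
	"regexp"

	"github.com/mattn/go-runewidth"
)

// same pattern as the vendored ctrlFinder: ESC '[' digits 'm'
var ctrlFinder = regexp.MustCompile("\x1b\x5b[0-9]+\x6d")

func visibleWidth(s string) int {
	n := runewidth.StringWidth(s)
	for _, m := range ctrlFinder.FindAllString(s, -1) {
		n -= runewidth.StringWidth(m)
	}
	return n
}

func main() {
	colored := "\x1b[31mred\x1b[0m"    // "red" wrapped in SGR color codes
	fmt.Println(visibleWidth(colored)) // prints 3: the escapes add no visible width
}
```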

9
vendor/github.com/cheggaaa/pb/termios_bsd.go generated vendored Normal file
View File

@ -0,0 +1,9 @@
// +build darwin freebsd netbsd openbsd dragonfly
// +build !appengine
package pb
import "syscall"
const ioctlReadTermios = syscall.TIOCGETA
const ioctlWriteTermios = syscall.TIOCSETA

13
vendor/github.com/cheggaaa/pb/termios_sysv.go generated vendored Normal file
View File

@ -0,0 +1,13 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux solaris
// +build !appengine
package pb
import "golang.org/x/sys/unix"
const ioctlReadTermios = unix.TCGETS
const ioctlWriteTermios = unix.TCSETS

26
vendor/github.com/cheggaaa/pb/writer.go generated vendored Normal file
View File

@ -0,0 +1,26 @@
package pb
import (
"io"
)
// Writer is a proxy writer that implements io.Writer
type Writer struct {
io.Writer
bar *ProgressBar
}
func (r *Writer) Write(p []byte) (n int, err error) {
n, err = r.Writer.Write(p)
r.bar.Add(n)
return
}
// Close finishes the bar and closes the wrapped writer when it implements io.Closer
func (r *Writer) Close() (err error) {
r.bar.Finish()
if closer, ok := r.Writer.(io.Closer); ok {
return closer.Close()
}
return
}

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2014 Pivotal
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,44 @@
# Jibber Jabber [![Build Status](https://travis-ci.org/cloudfoundry/jibber_jabber.svg?branch=master)](https://travis-ci.org/cloudfoundry/jibber_jabber)
Jibber Jabber is a Go library that can be used to detect an operating system's current language.
### OS Support
OS X and Linux via the `LC_ALL` and `LANG` environment variables. These are the standard variables used on UNIX systems for language detection.
Windows via [GetUserDefaultLocaleName](http://msdn.microsoft.com/en-us/library/windows/desktop/dd318136.aspx) and [GetSystemDefaultLocaleName](http://msdn.microsoft.com/en-us/library/windows/desktop/dd318122.aspx) system calls. These calls are supported in Windows Vista and up.
# Usage
Add the following line to your go `import`:
```
"github.com/cloudfoundry/jibber_jabber"
```
### DetectIETF
`DetectIETF` will return the current locale as a string. The format of the locale will be the [ISO 639](http://en.wikipedia.org/wiki/ISO_639) two-letter language code, a DASH, then an [ISO 3166](http://en.wikipedia.org/wiki/ISO_3166-1) two-letter country code.
```
userLocale, err := jibber_jabber.DetectIETF()
println("Locale:", userLocale)
```
### DetectLanguage
`DetectLanguage` will return the current language as a string. The format will be the [ISO 639](http://en.wikipedia.org/wiki/ISO_639) two-letter language code.
```
userLanguage, err := jibber_jabber.DetectLanguage()
println("Language:", userLanguage)
```
### DetectTerritory
`DetectTerritory` will return the current locale territory as a string. The format will be the [ISO 3166](http://en.wikipedia.org/wiki/ISO_3166-1) two-letter country code.
```
localeTerritory, err := jibber_jabber.DetectTerritory()
println("Territory:", localeTerritory)
```
### Errors
All of the Detect functions will return an error if they are unable to read the locale from the system.
For Windows, additional error information is provided due to the nature of the system call being used.
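For example, a caller might fall back to a default locale when detection fails (a sketch; the fallback value is just illustrative):

```
userLocale, err := jibber_jabber.DetectIETF()
if err != nil {
	// detection failed, fall back to a default
	userLocale = "en-US"
}
println("Locale:", userLocale)
```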

View File

@ -0,0 +1,22 @@
package jibber_jabber
import (
"strings"
)
const (
COULD_NOT_DETECT_PACKAGE_ERROR_MESSAGE = "Could not detect Language"
)
func splitLocale(locale string) (string, string) {
formattedLocale := strings.Split(locale, ".")[0]
formattedLocale = strings.Replace(formattedLocale, "-", "_", -1)
pieces := strings.Split(formattedLocale, "_")
language := pieces[0]
territory := ""
if len(pieces) > 1 {
territory = strings.Split(formattedLocale, "_")[1]
}
return language, territory
}
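splitLocale strips any encoding suffix, normalises dashes to underscores, and splits the result into language and territory. A minimal in-package check of that behaviour, written as a hypothetical test (not part of the vendored code):

```go
// Hypothetical in-package test sketch for splitLocale.
package jibber_jabber

import "testing"

func TestSplitLocale(t *testing.T) {
	cases := []struct{ in, lang, terr string }{
		{"en_US.UTF-8", "en", "US"},
		{"fr-FR", "fr", "FR"},
		{"de", "de", ""},
	}
	for _, c := range cases {
		lang, terr := splitLocale(c.in)
		if lang != c.lang || terr != c.terr {
			t.Errorf("splitLocale(%q) = (%q, %q), want (%q, %q)", c.in, lang, terr, c.lang, c.terr)
		}
	}
}
```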

View File

@ -0,0 +1,57 @@
// +build darwin freebsd linux netbsd openbsd
package jibber_jabber
import (
"errors"
"os"
"strings"
)
func getLangFromEnv() (locale string) {
locale = os.Getenv("LC_ALL")
if locale == "" {
locale = os.Getenv("LANG")
}
return
}
func getUnixLocale() (unix_locale string, err error) {
unix_locale = getLangFromEnv()
if unix_locale == "" {
err = errors.New(COULD_NOT_DETECT_PACKAGE_ERROR_MESSAGE)
}
return
}
func DetectIETF() (locale string, err error) {
unix_locale, err := getUnixLocale()
if err == nil {
language, territory := splitLocale(unix_locale)
locale = language
if territory != "" {
locale = strings.Join([]string{language, territory}, "-")
}
}
return
}
func DetectLanguage() (language string, err error) {
unix_locale, err := getUnixLocale()
if err == nil {
language, _ = splitLocale(unix_locale)
}
return
}
func DetectTerritory() (territory string, err error) {
unix_locale, err := getUnixLocale()
if err == nil {
_, territory = splitLocale(unix_locale)
}
return
}

View File

@ -0,0 +1,114 @@
// +build windows
package jibber_jabber
import (
"errors"
"syscall"
"unsafe"
)
const LOCALE_NAME_MAX_LENGTH uint32 = 85
var SUPPORTED_LOCALES = map[uintptr]string{
0x0407: "de-DE",
0x0409: "en-US",
0x0c0a: "es-ES", //or is it 0x040a
0x040c: "fr-FR",
0x0410: "it-IT",
0x0411: "ja-JA",
0x0412: "ko_KR",
0x0416: "pt-BR",
//0x0419: "ru_RU", - Will add support for Russian when nicksnyder/go-i18n supports Russian
0x0804: "zh-CN",
0x0c04: "zh-HK",
0x0404: "zh-TW",
}
func getWindowsLocaleFrom(sysCall string) (locale string, err error) {
buffer := make([]uint16, LOCALE_NAME_MAX_LENGTH)
dll := syscall.MustLoadDLL("kernel32")
proc := dll.MustFindProc(sysCall)
r, _, dllError := proc.Call(uintptr(unsafe.Pointer(&buffer[0])), uintptr(LOCALE_NAME_MAX_LENGTH))
if r == 0 {
err = errors.New(COULD_NOT_DETECT_PACKAGE_ERROR_MESSAGE + ":\n" + dllError.Error())
return
}
locale = syscall.UTF16ToString(buffer)
return
}
func getAllWindowsLocaleFrom(sysCall string) (string, error) {
dll, err := syscall.LoadDLL("kernel32")
if err != nil {
return "", errors.New("Could not find kernel32 dll")
}
proc, err := dll.FindProc(sysCall)
if err != nil {
return "", err
}
locale, _, dllError := proc.Call()
if locale == 0 {
return "", errors.New(COULD_NOT_DETECT_PACKAGE_ERROR_MESSAGE + ":\n" + dllError.Error())
}
return SUPPORTED_LOCALES[locale], nil
}
func getWindowsLocale() (locale string, err error) {
dll, err := syscall.LoadDLL("kernel32")
if err != nil {
return "", errors.New("Could not find kernel32 dll")
}
proc, err := dll.FindProc("GetVersion")
if err != nil {
return "", err
}
v, _, _ := proc.Call()
windowsVersion := byte(v)
isVistaOrGreater := (windowsVersion >= 6)
if isVistaOrGreater {
locale, err = getWindowsLocaleFrom("GetUserDefaultLocaleName")
if err != nil {
locale, err = getWindowsLocaleFrom("GetSystemDefaultLocaleName")
}
} else if !isVistaOrGreater {
locale, err = getAllWindowsLocaleFrom("GetUserDefaultLCID")
if err != nil {
locale, err = getAllWindowsLocaleFrom("GetSystemDefaultLCID")
}
} else {
panic(v)
}
return
}
func DetectIETF() (locale string, err error) {
locale, err = getWindowsLocale()
return
}
func DetectLanguage() (language string, err error) {
windows_locale, err := getWindowsLocale()
if err == nil {
language, _ = splitLocale(windows_locale)
}
return
}
func DetectTerritory() (territory string, err error) {
windows_locale, err := getWindowsLocale()
if err == nil {
_, territory = splitLocale(windows_locale)
}
return
}

20
vendor/github.com/fatih/color/LICENSE.md generated vendored Normal file
View File

@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2013 Fatih Arslan
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

182
vendor/github.com/fatih/color/README.md generated vendored Normal file
View File

@ -0,0 +1,182 @@
# Archived project. No maintenance.
This project is not maintained anymore and is archived. Feel free to fork and
make your own changes if needed. For more detail read my blog post: [Taking an indefinite sabbatical from my projects](https://arslan.io/2018/10/09/taking-an-indefinite-sabbatical-from-my-projects/)
Thanks to everyone for their valuable feedback and contributions.
# Color [![GoDoc](https://godoc.org/github.com/fatih/color?status.svg)](https://godoc.org/github.com/fatih/color)
Color lets you use colorized outputs in terms of [ANSI Escape
Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It
has support for Windows too! The API can be used in several ways, pick one that
suits you.
![Color](https://i.imgur.com/c1JI0lA.png)
## Install
```bash
go get github.com/fatih/color
```
## Examples
### Standard colors
```go
// Print with default helper functions
color.Cyan("Prints text in cyan.")
// A newline will be appended automatically
color.Blue("Prints %s in blue.", "text")
// These are using the default foreground colors
color.Red("We have red")
color.Magenta("And many others ..")
```
### Mix and reuse colors
```go
// Create a new color object
c := color.New(color.FgCyan).Add(color.Underline)
c.Println("Prints cyan text with an underline.")
// Or just add them to New()
d := color.New(color.FgCyan, color.Bold)
d.Printf("This prints bold cyan %s\n", "too!.")
// Mix up foreground and background colors, create new mixes!
red := color.New(color.FgRed)
boldRed := red.Add(color.Bold)
boldRed.Println("This will print text in bold red.")
whiteBackground := red.Add(color.BgWhite)
whiteBackground.Println("Red text with white background.")
```
### Use your own output (io.Writer)
```go
// Use your own io.Writer output
color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
blue := color.New(color.FgBlue)
blue.Fprint(writer, "This will print text in blue.")
```
### Custom print functions (PrintFunc)
```go
// Create a custom print function for convenience
red := color.New(color.FgRed).PrintfFunc()
red("Warning")
red("Error: %s", err)
// Mix up multiple attributes
notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
notice("Don't forget this...")
```
### Custom fprint functions (FprintFunc)
```go
blue := color.New(FgBlue).FprintfFunc()
blue(myWriter, "important notice: %s", stars)
// Mix up with multiple attributes
success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
success(myWriter, "Don't forget this...")
```
### Insert into noncolor strings (SprintFunc)
```go
// Create SprintXxx functions to mix strings with other non-colorized strings:
yellow := color.New(color.FgYellow).SprintFunc()
red := color.New(color.FgRed).SprintFunc()
fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error"))
info := color.New(color.FgWhite, color.BgGreen).SprintFunc()
fmt.Printf("This %s rocks!\n", info("package"))
// Use helper functions
fmt.Println("This", color.RedString("warning"), "should be not neglected.")
fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.")
// Windows supported too! Just don't forget to change the output to color.Output
fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
```
### Plug into existing code
```go
// Use handy standard colors
color.Set(color.FgYellow)
fmt.Println("Existing text will now be in yellow")
fmt.Printf("This one %s\n", "too")
color.Unset() // Don't forget to unset
// You can mix up parameters
color.Set(color.FgMagenta, color.Bold)
defer color.Unset() // Use it in your function
fmt.Println("All text will now be bold magenta.")
```
### Disable/Enable color
There might be a case where you want to explicitly disable/enable color output. The
`go-isatty` package will automatically disable color output for non-tty output streams
(for example, if the output is piped directly to `less`).
`Color` has support to disable/enable colors both globally and for single color
definitions. For example suppose you have a CLI app and a `--no-color` bool flag. You
can easily disable the color output with:
```go
var flagNoColor = flag.Bool("no-color", false, "Disable color output")
if *flagNoColor {
color.NoColor = true // disables colorized output
}
```
It also has support for single color definitions (local). You can
disable/enable color output on the fly:
```go
c := color.New(color.FgCyan)
c.Println("Prints cyan text")
c.DisableColor()
c.Println("This is printed without any color")
c.EnableColor()
c.Println("This prints again cyan...")
```
## Todo
* Save/Return previous values
* Evaluate fmt.Formatter interface
## Credits
* [Fatih Arslan](https://github.com/fatih)
* Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable)
## License
The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details

603
vendor/github.com/fatih/color/color.go generated vendored Normal file
View File

@ -0,0 +1,603 @@
package color
import (
"fmt"
"io"
"os"
"strconv"
"strings"
"sync"
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
)
var (
// NoColor defines if the output is colorized or not. It's dynamically set to
// false or true based on the stdout's file descriptor referring to a terminal
// or not. This is a global option and affects all colors. For more control
// over each color block use the methods DisableColor() individually.
NoColor = os.Getenv("TERM") == "dumb" ||
(!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()))
// Output defines the standard output of the print functions. By default
// os.Stdout is used.
Output = colorable.NewColorableStdout()
// Error defines a color supporting writer for os.Stderr.
Error = colorable.NewColorableStderr()
// colorsCache is used to reduce the count of created Color objects and
// allows reusing already created objects with the required Attribute.
colorsCache = make(map[Attribute]*Color)
colorsCacheMu sync.Mutex // protects colorsCache
)
// Color defines a custom color object which is defined by SGR parameters.
type Color struct {
params []Attribute
noColor *bool
}
// Attribute defines a single SGR Code
type Attribute int
const escape = "\x1b"
// Base attributes
const (
Reset Attribute = iota
Bold
Faint
Italic
Underline
BlinkSlow
BlinkRapid
ReverseVideo
Concealed
CrossedOut
)
// Foreground text colors
const (
FgBlack Attribute = iota + 30
FgRed
FgGreen
FgYellow
FgBlue
FgMagenta
FgCyan
FgWhite
)
// Foreground Hi-Intensity text colors
const (
FgHiBlack Attribute = iota + 90
FgHiRed
FgHiGreen
FgHiYellow
FgHiBlue
FgHiMagenta
FgHiCyan
FgHiWhite
)
// Background text colors
const (
BgBlack Attribute = iota + 40
BgRed
BgGreen
BgYellow
BgBlue
BgMagenta
BgCyan
BgWhite
)
// Background Hi-Intensity text colors
const (
BgHiBlack Attribute = iota + 100
BgHiRed
BgHiGreen
BgHiYellow
BgHiBlue
BgHiMagenta
BgHiCyan
BgHiWhite
)
// New returns a newly created color object.
func New(value ...Attribute) *Color {
c := &Color{params: make([]Attribute, 0)}
c.Add(value...)
return c
}
// Set sets the given parameters immediately. It will change the color of
// output with the given SGR parameters until color.Unset() is called.
func Set(p ...Attribute) *Color {
c := New(p...)
c.Set()
return c
}
// Unset resets all escape attributes and clears the output. Usually should
// be called after Set().
func Unset() {
if NoColor {
return
}
fmt.Fprintf(Output, "%s[%dm", escape, Reset)
}
// Set sets the SGR sequence.
func (c *Color) Set() *Color {
if c.isNoColorSet() {
return c
}
fmt.Fprintf(Output, c.format())
return c
}
func (c *Color) unset() {
if c.isNoColorSet() {
return
}
Unset()
}
func (c *Color) setWriter(w io.Writer) *Color {
if c.isNoColorSet() {
return c
}
fmt.Fprintf(w, c.format())
return c
}
func (c *Color) unsetWriter(w io.Writer) {
if c.isNoColorSet() {
return
}
if NoColor {
return
}
fmt.Fprintf(w, "%s[%dm", escape, Reset)
}
// Add is used to chain SGR parameters. Use as many parameters as needed to combine
// and create custom color objects. Example: Add(color.FgRed, color.Underline).
func (c *Color) Add(value ...Attribute) *Color {
c.params = append(c.params, value...)
return c
}
func (c *Color) prepend(value Attribute) {
c.params = append(c.params, 0)
copy(c.params[1:], c.params[0:])
c.params[0] = value
}
// Fprint formats using the default formats for its operands and writes to w.
// Spaces are added between operands when neither is a string.
// It returns the number of bytes written and any write error encountered.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
c.setWriter(w)
defer c.unsetWriter(w)
return fmt.Fprint(w, a...)
}
// Print formats using the default formats for its operands and writes to
// standard output. Spaces are added between operands when neither is a
// string. It returns the number of bytes written and any write error
// encountered. This is the standard fmt.Print() method wrapped with the given
// color.
func (c *Color) Print(a ...interface{}) (n int, err error) {
c.Set()
defer c.unset()
return fmt.Fprint(Output, a...)
}
// Fprintf formats according to a format specifier and writes to w.
// It returns the number of bytes written and any write error encountered.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
c.setWriter(w)
defer c.unsetWriter(w)
return fmt.Fprintf(w, format, a...)
}
// Printf formats according to a format specifier and writes to standard output.
// It returns the number of bytes written and any write error encountered.
// This is the standard fmt.Printf() method wrapped with the given color.
func (c *Color) Printf(format string, a ...interface{}) (n int, err error) {
c.Set()
defer c.unset()
return fmt.Fprintf(Output, format, a...)
}
// Fprintln formats using the default formats for its operands and writes to w.
// Spaces are always added between operands and a newline is appended.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
c.setWriter(w)
defer c.unsetWriter(w)
return fmt.Fprintln(w, a...)
}
// Println formats using the default formats for its operands and writes to
// standard output. Spaces are always added between operands and a newline is
// appended. It returns the number of bytes written and any write error
// encountered. This is the standard fmt.Print() method wrapped with the given
// color.
func (c *Color) Println(a ...interface{}) (n int, err error) {
c.Set()
defer c.unset()
return fmt.Fprintln(Output, a...)
}
// Sprint is just like Print, but returns a string instead of printing it.
func (c *Color) Sprint(a ...interface{}) string {
return c.wrap(fmt.Sprint(a...))
}
// Sprintln is just like Println, but returns a string instead of printing it.
func (c *Color) Sprintln(a ...interface{}) string {
return c.wrap(fmt.Sprintln(a...))
}
// Sprintf is just like Printf, but returns a string instead of printing it.
func (c *Color) Sprintf(format string, a ...interface{}) string {
return c.wrap(fmt.Sprintf(format, a...))
}
// FprintFunc returns a new function that prints the passed arguments as
// colorized with color.Fprint().
func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) {
return func(w io.Writer, a ...interface{}) {
c.Fprint(w, a...)
}
}
// PrintFunc returns a new function that prints the passed arguments as
// colorized with color.Print().
func (c *Color) PrintFunc() func(a ...interface{}) {
return func(a ...interface{}) {
c.Print(a...)
}
}
// FprintfFunc returns a new function that prints the passed arguments as
// colorized with color.Fprintf().
func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) {
return func(w io.Writer, format string, a ...interface{}) {
c.Fprintf(w, format, a...)
}
}
// PrintfFunc returns a new function that prints the passed arguments as
// colorized with color.Printf().
func (c *Color) PrintfFunc() func(format string, a ...interface{}) {
return func(format string, a ...interface{}) {
c.Printf(format, a...)
}
}
// FprintlnFunc returns a new function that prints the passed arguments as
// colorized with color.Fprintln().
func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) {
return func(w io.Writer, a ...interface{}) {
c.Fprintln(w, a...)
}
}
// PrintlnFunc returns a new function that prints the passed arguments as
// colorized with color.Println().
func (c *Color) PrintlnFunc() func(a ...interface{}) {
return func(a ...interface{}) {
c.Println(a...)
}
}
// SprintFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprint(). Useful to put into or mix into other
// strings. Windows users should use this in conjunction with color.Output, example:
//
// put := New(FgYellow).SprintFunc()
// fmt.Fprintf(color.Output, "This is a %s", put("warning"))
func (c *Color) SprintFunc() func(a ...interface{}) string {
return func(a ...interface{}) string {
return c.wrap(fmt.Sprint(a...))
}
}
// SprintfFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprintf(). Useful to put into or mix into other
// strings. Windows users should use this in conjunction with color.Output.
func (c *Color) SprintfFunc() func(format string, a ...interface{}) string {
return func(format string, a ...interface{}) string {
return c.wrap(fmt.Sprintf(format, a...))
}
}
// SprintlnFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprintln(). Useful to put into or mix into other
// strings. Windows users should use this in conjunction with color.Output.
func (c *Color) SprintlnFunc() func(a ...interface{}) string {
return func(a ...interface{}) string {
return c.wrap(fmt.Sprintln(a...))
}
}
// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m" escape;
// an example output might be: "1;36" -> bold cyan
func (c *Color) sequence() string {
format := make([]string, len(c.params))
for i, v := range c.params {
format[i] = strconv.Itoa(int(v))
}
return strings.Join(format, ";")
}
// wrap wraps the string s with the color's attributes. The string is ready to
// be printed.
func (c *Color) wrap(s string) string {
if c.isNoColorSet() {
return s
}
return c.format() + s + c.unformat()
}
func (c *Color) format() string {
return fmt.Sprintf("%s[%sm", escape, c.sequence())
}
func (c *Color) unformat() string {
return fmt.Sprintf("%s[%dm", escape, Reset)
}
// DisableColor disables the color output. Useful when you want to keep existing
// code unchanged while still being able to suppress colored output. Can be used for
// flags like "--no-color". To enable it again, use the EnableColor() method.
func (c *Color) DisableColor() {
c.noColor = boolPtr(true)
}
// EnableColor enables the color output. Use it in conjunction with
// DisableColor(). Otherwise this method has no side effects.
func (c *Color) EnableColor() {
c.noColor = boolPtr(false)
}
func (c *Color) isNoColorSet() bool {
// check first whether the user set a value explicitly
if c.noColor != nil {
return *c.noColor
}
// if not return the global option, which is disabled by default
return NoColor
}
// Equals returns a boolean value indicating whether two colors are equal.
func (c *Color) Equals(c2 *Color) bool {
if len(c.params) != len(c2.params) {
return false
}
for _, attr := range c.params {
if !c2.attrExists(attr) {
return false
}
}
return true
}
func (c *Color) attrExists(a Attribute) bool {
for _, attr := range c.params {
if attr == a {
return true
}
}
return false
}
func boolPtr(v bool) *bool {
return &v
}
func getCachedColor(p Attribute) *Color {
colorsCacheMu.Lock()
defer colorsCacheMu.Unlock()
c, ok := colorsCache[p]
if !ok {
c = New(p)
colorsCache[p] = c
}
return c
}
func colorPrint(format string, p Attribute, a ...interface{}) {
c := getCachedColor(p)
if !strings.HasSuffix(format, "\n") {
format += "\n"
}
if len(a) == 0 {
c.Print(format)
} else {
c.Printf(format, a...)
}
}
func colorString(format string, p Attribute, a ...interface{}) string {
c := getCachedColor(p)
if len(a) == 0 {
return c.SprintFunc()(format)
}
return c.SprintfFunc()(format, a...)
}
// Black is a convenient helper function to print with black foreground. A
// newline is appended to format by default.
func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) }
// Red is a convenient helper function to print with red foreground. A
// newline is appended to format by default.
func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) }
// Green is a convenient helper function to print with green foreground. A
// newline is appended to format by default.
func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) }
// Yellow is a convenient helper function to print with yellow foreground.
// A newline is appended to format by default.
func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) }
// Blue is a convenient helper function to print with blue foreground. A
// newline is appended to format by default.
func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) }
// Magenta is a convenient helper function to print with magenta foreground.
// A newline is appended to format by default.
func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) }
// Cyan is a convenient helper function to print with cyan foreground. A
// newline is appended to format by default.
func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) }
// White is a convenient helper function to print with white foreground. A
// newline is appended to format by default.
func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) }
// BlackString is a convenient helper function to return a string with black
// foreground.
func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) }
// RedString is a convenient helper function to return a string with red
// foreground.
func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) }
// GreenString is a convenient helper function to return a string with green
// foreground.
func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) }
// YellowString is a convenient helper function to return a string with yellow
// foreground.
func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) }
// BlueString is a convenient helper function to return a string with blue
// foreground.
func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) }
// MagentaString is a convenient helper function to return a string with magenta
// foreground.
func MagentaString(format string, a ...interface{}) string {
return colorString(format, FgMagenta, a...)
}
// CyanString is a convenient helper function to return a string with cyan
// foreground.
func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) }
// WhiteString is a convenient helper function to return a string with white
// foreground.
func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) }
// HiBlack is a convenient helper function to print with hi-intensity black foreground. A
// newline is appended to format by default.
func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) }
// HiRed is a convenient helper function to print with hi-intensity red foreground. A
// newline is appended to format by default.
func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) }
// HiGreen is a convenient helper function to print with hi-intensity green foreground. A
// newline is appended to format by default.
func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) }
// HiYellow is a convenient helper function to print with hi-intensity yellow foreground.
// A newline is appended to format by default.
func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) }
// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A
// newline is appended to format by default.
func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) }
// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground.
// A newline is appended to format by default.
func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) }
// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A
// newline is appended to format by default.
func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) }
// HiWhite is a convenient helper function to print with hi-intensity white foreground. A
// newline is appended to format by default.
func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) }
// HiBlackString is a convenient helper function to return a string with hi-intensity black
// foreground.
func HiBlackString(format string, a ...interface{}) string {
return colorString(format, FgHiBlack, a...)
}
// HiRedString is a convenient helper function to return a string with hi-intensity red
// foreground.
func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) }
// HiGreenString is a convenient helper function to return a string with hi-intensity green
// foreground.
func HiGreenString(format string, a ...interface{}) string {
return colorString(format, FgHiGreen, a...)
}
// HiYellowString is a convenient helper function to return a string with hi-intensity yellow
// foreground.
func HiYellowString(format string, a ...interface{}) string {
return colorString(format, FgHiYellow, a...)
}
// HiBlueString is a convenient helper function to return a string with hi-intensity blue
// foreground.
func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) }
// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta
// foreground.
func HiMagentaString(format string, a ...interface{}) string {
return colorString(format, FgHiMagenta, a...)
}
// HiCyanString is a convenient helper function to return a string with hi-intensity cyan
// foreground.
func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) }
// HiWhiteString is a convenient helper function to return a string with hi-intensity white
// foreground.
func HiWhiteString(format string, a ...interface{}) string {
return colorString(format, FgHiWhite, a...)
}

133
vendor/github.com/fatih/color/doc.go generated vendored Normal file
View File

@ -0,0 +1,133 @@
/*
Package color is an ANSI color package to output colorized or SGR defined
output to the standard output. The API can be used in several ways; pick one
that suits you.
Use simple and default helper functions with predefined foreground colors:
color.Cyan("Prints text in cyan.")
// a newline will be appended automatically
color.Blue("Prints %s in blue.", "text")
// More default foreground colors..
color.Red("We have red")
color.Yellow("Yellow color too!")
color.Magenta("And many others ..")
// Hi-intensity colors
color.HiGreen("Bright green color.")
color.HiBlack("Bright black means gray..")
color.HiWhite("Shiny white color!")
However there are times where custom color mixes are required. Below are some
examples to create custom color objects and use the print functions of each
separate color object.
// Create a new color object
c := color.New(color.FgCyan).Add(color.Underline)
c.Println("Prints cyan text with an underline.")
// Or just add them to New()
d := color.New(color.FgCyan, color.Bold)
d.Printf("This prints bold cyan %s\n", "too!.")
// Mix up foreground and background colors, create new mixes!
red := color.New(color.FgRed)
boldRed := red.Add(color.Bold)
boldRed.Println("This will print text in bold red.")
whiteBackground := red.Add(color.BgWhite)
whiteBackground.Println("Red text with White background.")
// Use your own io.Writer output
color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
blue := color.New(color.FgBlue)
blue.Fprint(myWriter, "This will print text in blue.")
You can create PrintXxx functions to simplify even more:
// Create a custom print function for convenience
red := color.New(color.FgRed).PrintfFunc()
red("warning")
red("error: %s", err)
// Mix up multiple attributes
notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
notice("don't forget this...")
You can also use FprintXxx functions to pass your own io.Writer:
blue := color.New(FgBlue).FprintfFunc()
blue(myWriter, "important notice: %s", stars)
// Mix up with multiple attributes
success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
success(myWriter, "don't forget this...")
Or create SprintXxx functions to mix strings with other non-colorized strings:
yellow := New(FgYellow).SprintFunc()
red := New(FgRed).SprintFunc()
fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error"))
info := New(FgWhite, BgGreen).SprintFunc()
fmt.Printf("this %s rocks!\n", info("package"))
Windows support is enabled by default. All Print functions work as intended.
However, for the color.SprintXXX functions only, users should use fmt.FprintXXX and
set the output to color.Output:
fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
info := New(FgWhite, BgGreen).SprintFunc()
fmt.Fprintf(color.Output, "this %s rocks!\n", info("package"))
Using it with existing code is possible. Just use the Set() method to set the
standard output to the given parameters. That way a rewrite of existing
code is not required.
// Use handy standard colors.
color.Set(color.FgYellow)
fmt.Println("Existing text will be now in Yellow")
fmt.Printf("This one %s\n", "too")
color.Unset() // don't forget to unset
// You can mix up parameters
color.Set(color.FgMagenta, color.Bold)
defer color.Unset() // use it in your function
fmt.Println("All text will be now bold magenta.")
There might be a case where you want to disable color output (for example to
pipe the standard output of your app to somewhere else). `Color` has support to
disable colors both globally and for single color definition. For example
suppose you have a CLI app and a `--no-color` bool flag. You can easily disable
the color output with:
var flagNoColor = flag.Bool("no-color", false, "Disable color output")
if *flagNoColor {
color.NoColor = true // disables colorized output
}
It also has support for single color definitions (local). You can
disable/enable color output on the fly:
c := color.New(color.FgCyan)
c.Println("Prints cyan text")
c.DisableColor()
c.Println("This is printed without any color")
c.EnableColor()
c.Println("This prints again cyan...")
*/
package color

8
vendor/github.com/fatih/color/go.mod generated vendored Normal file
View File

@ -0,0 +1,8 @@
module github.com/fatih/color
go 1.13
require (
github.com/mattn/go-colorable v0.1.4
github.com/mattn/go-isatty v0.0.11
)

8
vendor/github.com/fatih/color/go.sum generated vendored Normal file
View File

@ -0,0 +1,8 @@
github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

191
vendor/github.com/golang/glog/LICENSE generated vendored Normal file
View File

@ -0,0 +1,191 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, "control" means (i) the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising
permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.
"Object" form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
"submitted" means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.
2. Grant of Copyright License.
Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.
3. Grant of Patent License.
Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to make, have
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
such license applies only to those patent claims licensable by such Contributor
that are necessarily infringed by their Contribution(s) alone or by combination
of their Contribution(s) with the Work to which such Contribution(s) was
submitted. If You institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
Contribution incorporated within the Work constitutes direct or contributory
patent infringement, then any patent licenses granted to You under this License
for that Work shall terminate as of the date such litigation is filed.
4. Redistribution.
You may reproduce and distribute copies of the Work or Derivative Works thereof
in any medium, with or without modifications, and in Source or Object form,
provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of
this License; and
You must cause any modified files to carry prominent notices stating that You
changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
following places: within a NOTICE text file distributed as part of the
Derivative Works; within the Source form or documentation, if provided along
with the Derivative Works; or, within a display generated by the Derivative
Works, if and wherever such third-party notices normally appear. The contents of
the NOTICE file are for informational purposes only and do not modify the
License. You may add Your own attribution notices within Derivative Works that
You distribute, alongside or as an addendum to the NOTICE text from the Work,
provided that such additional attribution notices cannot be construed as
modifying the License.
You may add Your own copyright statement to Your modifications and may provide
additional or different license terms and conditions for use, reproduction, or
distribution of Your modifications, or for any such Derivative Works as a whole,
provided Your use, reproduction, and distribution of the Work otherwise complies
with the conditions stated in this License.
5. Submission of Contributions.
Unless You explicitly state otherwise, any Contribution intentionally submitted
for inclusion in the Work by You to the Licensor shall be under the terms and
conditions of this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify the terms of
any separate license agreement you may have executed with Licensor regarding
such Contributions.
6. Trademarks.
This License does not grant permission to use the trade names, trademarks,
service marks, or product names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the NOTICE file.
7. Disclaimer of Warranty.
Unless required by applicable law or agreed to in writing, Licensor provides the
Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
including, without limitation, any warranties or conditions of TITLE,
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
solely responsible for determining the appropriateness of using or
redistributing the Work and assume any risks associated with Your exercise of
permissions under this License.
8. Limitation of Liability.
In no event and under no legal theory, whether in tort (including negligence),
contract, or otherwise, unless required by applicable law (such as deliberate
and grossly negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special, incidental,
or consequential damages of any character arising as a result of this License or
out of the use or inability to use the Work (including but not limited to
damages for loss of goodwill, work stoppage, computer failure or malfunction, or
any and all other commercial damages or losses), even if such Contributor has
been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability.
While redistributing the Work or Derivative Works thereof, You may choose to
offer, and charge a fee for, acceptance of support, warranty, indemnity, or
other liability obligations and/or rights consistent with this License. However,
in accepting such obligations, You may act only on Your own behalf and on Your
sole responsibility, not on behalf of any other Contributor, and only if You
agree to indemnify, defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason of your
accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work
To apply the Apache License to your work, attach the following boilerplate
notice, with the fields enclosed by brackets "[]" replaced with your own
identifying information. (Don't include the brackets!) The text should be
enclosed in the appropriate comment syntax for the file format. We also
recommend that a file or class name and description of purpose be included on
the same "printed page" as the copyright notice for easier identification within
third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

44
vendor/github.com/golang/glog/README generated vendored Normal file
View File

@ -0,0 +1,44 @@
glog
====
Leveled execution logs for Go.
This is an efficient pure Go implementation of leveled logs in the
manner of the open source C++ package
https://github.com/google/glog
By binding methods to booleans it is possible to use the log package
without paying the expense of evaluating the arguments to the log.
Through the -vmodule flag, the package also provides fine-grained
control over logging at the file level.
The comment from glog.go introduces the ideas:
Package glog implements logging analogous to the Google-internal
C++ INFO/ERROR/V setup. It provides functions Info, Warning,
Error, Fatal, plus formatting variants such as Infof. It
also provides V-style logging controlled by the -v and
-vmodule=file=2 flags.
Basic examples:
glog.Info("Prepare to repel boarders")
glog.Fatalf("Initialization failed: %s", err)
See the documentation for the V function for an explanation
of these examples:
if glog.V(2) {
glog.Info("Starting transaction...")
}
glog.V(2).Infoln("Processed", nItems, "elements")
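For context, here is a minimal self-contained sketch (not part of the original
README) showing how these calls fit together in a program. glog registers its
flags (-v, -vmodule, -logtostderr, -log_dir, ...) in an init function, so
flag.Parse must run before the first log call, and Flush writes out buffered
log lines; by default, log files are written under the temporary directory.

	package main

	import (
		"flag"

		"github.com/golang/glog"
	)

	func main() {
		flag.Parse()       // parse glog's flags before logging
		defer glog.Flush() // write out buffered log lines on exit

		glog.Info("Prepare to repel boarders")

		// The V guard keeps the argument expressions from being evaluated
		// unless -v=2 (or a matching -vmodule pattern) is in effect.
		if glog.V(2) {
			glog.Info("Starting transaction...")
		}
		glog.V(2).Infoln("Processed", 42, "elements")
	}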
The repository contains an open source version of the log package
used inside Google. The master copy of the source lives inside
Google, not here. The code in this repo is for export only and is not itself
under development. Feature requests will be ignored.
Send bug reports to golang-nuts@googlegroups.com.

1180
vendor/github.com/golang/glog/glog.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

124
vendor/github.com/golang/glog/glog_file.go generated vendored Normal file
View File

@ -0,0 +1,124 @@
// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/
//
// Copyright 2013 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// File I/O for logs.
package glog
import (
"errors"
"flag"
"fmt"
"os"
"os/user"
"path/filepath"
"strings"
"sync"
"time"
)
// MaxSize is the maximum size of a log file in bytes.
var MaxSize uint64 = 1024 * 1024 * 1800
// logDirs lists the candidate directories for new log files.
var logDirs []string
// If non-empty, overrides the choice of directory in which to write logs.
// See createLogDirs for the full list of possible destinations.
var logDir = flag.String("log_dir", "", "If non-empty, write log files in this directory")
func createLogDirs() {
if *logDir != "" {
logDirs = append(logDirs, *logDir)
}
logDirs = append(logDirs, os.TempDir())
}
var (
pid = os.Getpid()
program = filepath.Base(os.Args[0])
host = "unknownhost"
userName = "unknownuser"
)
func init() {
h, err := os.Hostname()
if err == nil {
host = shortHostname(h)
}
current, err := user.Current()
if err == nil {
userName = current.Username
}
// Sanitize userName since it may contain filepath separators on Windows.
userName = strings.Replace(userName, `\`, "_", -1)
}
// shortHostname returns its argument, truncating at the first period.
// For instance, given "www.google.com" it returns "www".
func shortHostname(hostname string) string {
if i := strings.Index(hostname, "."); i >= 0 {
return hostname[:i]
}
return hostname
}
// logName returns a new log file name containing tag, with start time t, and
// the name for the symlink for tag.
func logName(tag string, t time.Time) (name, link string) {
name = fmt.Sprintf("%s.%s.%s.log.%s.%04d%02d%02d-%02d%02d%02d.%d",
program,
host,
userName,
tag,
t.Year(),
t.Month(),
t.Day(),
t.Hour(),
t.Minute(),
t.Second(),
pid)
return name, program + "." + tag
}
var onceLogDirs sync.Once
// create creates a new log file and returns the file and its filename, which
// contains tag ("INFO", "FATAL", etc.) and t. If the file is created
// successfully, create also attempts to update the symlink for that tag, ignoring
// errors.
func create(tag string, t time.Time) (f *os.File, filename string, err error) {
onceLogDirs.Do(createLogDirs)
if len(logDirs) == 0 {
return nil, "", errors.New("log: no log dirs")
}
name, link := logName(tag, t)
var lastErr error
for _, dir := range logDirs {
fname := filepath.Join(dir, name)
f, err := os.Create(fname)
if err == nil {
symlink := filepath.Join(dir, link)
os.Remove(symlink) // ignore err
os.Symlink(name, symlink) // ignore err
return f, fname, nil
}
lastErr = err
}
return nil, "", fmt.Errorf("log: cannot create log: %v", lastErr)
}

26
vendor/github.com/iawia002/annie/CONTRIBUTING.md generated vendored Normal file
View File

@ -0,0 +1,26 @@
# Contributing Guide
* [Style Guide](#style-guide)
* [Build](#build)
* [Features Requested](#features-requested)
## Style Guide
### Code format
Annie uses [gofmt](https://golang.org/cmd/gofmt) to format its code; you must run [gofmt](https://golang.org/cmd/gofmt) on your code before submitting.
### Linter
We recommend using [golint](https://github.com/golang/lint) or [gometalinter](https://github.com/alecthomas/gometalinter) to check your code style.
## Build
Make sure that this folder is in `GOPATH`, then:
```bash
$ go build
```
## Features Requested
There are several [features](https://github.com/iawia002/annie/issues?q=is%3Aissue+is%3Aopen+label%3Afeature-request) requested by the community. If you have an idea, feel free to fork the repo, follow the style guide above, and open a pull request once the tests pass. You are also welcome to propose new features through an issue.

9
vendor/github.com/iawia002/annie/LICENSE generated vendored Normal file
View File

@ -0,0 +1,9 @@
MIT License
Copyright 2018-present, iawia002
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

641
vendor/github.com/iawia002/annie/README.md generated vendored Normal file
View File

@ -0,0 +1,641 @@
<p align="center"><img src="static/logo.png" alt="Annie" height="100px"></p>
<div align="center">
<a href="https://codecov.io/gh/iawia002/annie">
<img src="https://img.shields.io/codecov/c/github/iawia002/annie.svg?style=flat-square" alt="Codecov">
</a>
<a href="https://travis-ci.com/iawia002/annie">
<img src="https://img.shields.io/travis/iawia002/annie.svg?style=flat-square" alt="Build Status">
</a>
<a href="https://goreportcard.com/report/github.com/iawia002/annie">
<img src="https://goreportcard.com/badge/github.com/iawia002/annie?style=flat-square" alt="Go Report Card">
</a>
<a href="https://github.com/iawia002/annie/releases">
<img src="https://img.shields.io/github/release/iawia002/annie.svg?style=flat-square" alt="GitHub release">
</a>
<a href="https://formulae.brew.sh/formula/annie">
<img src="https://img.shields.io/homebrew/v/annie.svg?style=flat-square" alt="Homebrew">
</a>
<a href="https://t.me/anniedev">
<img src="https://img.shields.io/badge/telegram-join%20chat-0088cc.svg?longCache=true&style=flat-square" alt="telegram">
</a>
</div>
👾 Annie is a fast, simple and clean video downloader built with Go.
* [Installation](#installation)
* [Getting Started](#getting-started)
* [Download a video](#download-a-video)
* [Download anything else](#download-anything-else)
* [Download playlist](#download-playlist)
* [Multiple inputs](#multiple-inputs)
* [Resume a download](#resume-a-download)
* [Cookies](#cookies)
* [Auto retry](#auto-retry)
* [Proxy](#proxy)
* [Multi-Thread](#multi-thread)
* [Short link](#short-link)
* [Use specified Referrer](#use-specified-referrer)
* [Specify the output path and name](#specify-the-output-path-and-name)
* [Debug Mode](#debug-mode)
* [Reuse extracted data](#reuse-extracted-data)
* [Options](#options)
* [Supported Sites](#supported-sites)
* [Known issues](#known-issues)
* [Contributing](#contributing)
* [Authors](#authors)
* [Similar projects](#similar-projects)
* [License](#license)
## Installation
### Prerequisites
The following dependencies are required and must be installed separately.
* **[FFmpeg](https://www.ffmpeg.org)**
> **Note**: FFmpeg does not affect downloading; it is only used for the final file merge.
### Install via `go get`
To install Annie, use `go get`, or download the binary file from the [Releases](https://github.com/iawia002/annie/releases) page.
```bash
$ go get github.com/iawia002/annie
```
### Homebrew (macOS only)
For macOS users, you can install `annie` via:
```bash
$ brew install annie
```
### Arch Linux
For Arch Users [AUR](https://aur.archlinux.org/packages/annie) package is available.
### Void Linux
For Void Linux users, you can install `annie` via:
```
$ xbps-install -S annie
```
### [Scoop](https://scoop.sh/) on Windows
```sh
$ scoop install annie
```
### [Chocolatey](https://chocolatey.org/) on Windows
```
$ choco install annie
```
## Getting Started
Usage:
```
annie [OPTIONS] URL [URL...]
```
### Download a video
```console
$ annie https://www.youtube.com/watch?v=dQw4w9WgXcQ
Site: YouTube youtube.com
Title: Rick Astley - Never Gonna Give You Up (Video)
Type: video
Stream:
[248] -------------------
Quality: 1080p video/webm; codecs="vp9"
Size: 63.93 MiB (67038963 Bytes)
# download with: annie -f 248 ...
41.88 MiB / 63.93 MiB [=================>-------------] 65.51% 4.22 MiB/s 00m05s
```
> Note: wrap the URL in quotation marks if it contains special characters. (thanks @tonyxyl for pointing this out)
>
> `$ annie 'https://...'`
The `-i` option displays all available qualities of the video without downloading it.
```console
$ annie -i https://www.youtube.com/watch?v=dQw4w9WgXcQ
Site: YouTube youtube.com
Title: Rick Astley - Never Gonna Give You Up (Video)
Type: video
Streams: # All available quality
[248] -------------------
Quality: 1080p video/webm; codecs="vp9"
Size: 49.29 MiB (51687554 Bytes)
# download with: annie -f 248 ...
[137] -------------------
Quality: 1080p video/mp4; codecs="avc1.640028"
Size: 43.45 MiB (45564306 Bytes)
# download with: annie -f 137 ...
[398] -------------------
Quality: 720p video/mp4; codecs="av01.0.05M.08"
Size: 37.12 MiB (38926432 Bytes)
# download with: annie -f 398 ...
[136] -------------------
Quality: 720p video/mp4; codecs="avc1.4d401f"
Size: 31.34 MiB (32867324 Bytes)
# download with: annie -f 136 ...
[247] -------------------
Quality: 720p video/webm; codecs="vp9"
Size: 31.03 MiB (32536181 Bytes)
# download with: annie -f 247 ...
```
Use `annie -f stream "URL"` to download a specific stream listed in the output of the `-i` option.
### Download anything else
If Annie is provided the URL of a specific resource, then it will be downloaded directly:
```console
$ annie https://img9.bcyimg.com/drawer/15294/post/1799t/1f5a87801a0711e898b12b640777720f.jpg
annie doesn't support this URL right now, but it will try to download it directly
Site: Universal
Title: 1f5a87801a0711e898b12b640777720f
Type: image/jpeg
Stream:
[default] -------------------
Size: 1.00 MiB (1051042 Bytes)
# download with: annie -f default "URL"
1.00 MiB / 1.00 MiB [===================================] 100.00% 1.21 MiB/s 0s
```
### Download playlist
The `-p` option downloads an entire playlist instead of a single video.
```console
$ annie -i -p https://www.bilibili.com/bangumi/play/ep198061
Site: 哔哩哔哩 bilibili.com
Title: Doctor X 第四季:第一集
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 845.66 MiB (886738354 Bytes)
# download with: annie -f default "URL"
Site: 哔哩哔哩 bilibili.com
Title: Doctor X 第四季:第二集
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 930.71 MiB (975919195 Bytes)
# download with: annie -f default "URL"
......
```
You can use the `-start`, `-end` or `-items` option to specify the download range of the list (a small sketch of the items syntax follows the option list below):
```
-start
Playlist video to start at (default 1)
-end
Playlist video to end at
-items
Playlist video items to download. Separated by commas like: 1,5,6,8-10
```
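For illustration, a small Go sketch (not annie's own code, which lives in its utils package) of how the `1,5,6,8-10` syntax can be expanded into item indices:
```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// expandItems turns a spec like "1,5,6,8-10" into []int{1, 5, 6, 8, 9, 10}.
func expandItems(spec string) ([]int, error) {
	var items []int
	for _, part := range strings.Split(spec, ",") {
		part = strings.TrimSpace(part)
		if bounds := strings.SplitN(part, "-", 2); len(bounds) == 2 {
			start, err := strconv.Atoi(bounds[0])
			if err != nil {
				return nil, err
			}
			end, err := strconv.Atoi(bounds[1])
			if err != nil {
				return nil, err
			}
			for i := start; i <= end; i++ {
				items = append(items, i)
			}
			continue
		}
		n, err := strconv.Atoi(part)
		if err != nil {
			return nil, err
		}
		items = append(items, n)
	}
	return items, nil
}

func main() {
	items, err := expandItems("1,5,6,8-10")
	if err != nil {
		panic(err)
	}
	fmt.Println(items) // [1 5 6 8 9 10]
}
```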
### Multiple inputs
You can also download multiple URLs at once:
```console
$ annie -i https://www.bilibili.com/video/av21877586 https://www.bilibili.com/video/av21990740
Site: 哔哩哔哩 bilibili.com
Title: 【莓机会了】甜到虐哭的13集单集MAD「我现在什么都不想干,更不想看14集」
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 51.88 MiB (54403767 Bytes)
# download with: annie -f default "URL"
Site: 哔哩哔哩 bilibili.com
Title: 【莓救了】甜到虐哭国家队单集MAD-当熟悉的bgm响起眼泪从脸颊滑下
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 77.63 MiB (81404093 Bytes)
# download with: annie -f default "URL"
```
These URLs will be downloaded one by one.
You can also use the `-F` option to read URLs from a file:
```console
$ annie -F ~/Desktop/u.txt
Site: 微博 weibo.com
Title: 在Google我们设计什么 via@阑夕
Type: video
Stream:
[default] -------------------
Size: 19.19 MiB (20118196 Bytes)
# download with: annie -f default "URL"
19.19 MiB / 19.19 MiB [=================================] 100.00% 9.69 MiB/s 1s
......
```
You can use the `-start`, `-end` or `-items` option to specify the download range of the list:
```
-start
File line to start at (default 1)
-end
File line to end at
-items
File lines to download. Separated by commas like: 1,5,6,8-10
```
### Resume a download
<kbd>Ctrl</kbd>+<kbd>C</kbd> interrupts a download.
A temporary `.download` file is kept in the output directory. If `annie` is run again with the same arguments, the download will resume from the last session.
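A rough Go sketch of the resume idea, with a placeholder URL and file name (annie's real logic lives in its downloader package and handles more cases): the size of the partial `.download` file becomes the start of an HTTP `Range` request, and the remaining bytes are appended.
```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// resume appends the remainder of url to the partially downloaded file at path.
func resume(url, path string) error {
	f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	// Ask the server for everything after the bytes we already have.
	req.Header.Set("Range", fmt.Sprintf("bytes=%d-", info.Size()))

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusPartialContent {
		// A real downloader would also handle 200 OK (server ignored the
		// Range header) by restarting from scratch.
		return fmt.Errorf("expected 206 Partial Content, got %s", res.Status)
	}

	_, err = io.Copy(f, res.Body)
	return err
}

func main() {
	if err := resume("https://example.com/video.mp4", "video.mp4.download"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```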
### Auto retry
annie automatically retries when a download fails; you can specify the number of retries with the `-retry` option (default is 100).
### Cookies
Cookies can be provided to `annie` with the `-c` option if they are required to access the video.
Cookies can be in the following format or in the [Netscape Cookie](https://curl.haxx.se/rfc/cookie_spec.html) file format:
```console
name=value; name2=value2; ...
```
Cookies can be supplied either as a string or as a text file, in one of the two following ways (a small Go sketch of the string form follows the examples).
As a string:
```console
$ annie -c "name=value; name2=value2" https://www.bilibili.com/video/av20203945
```
As a text file:
```console
$ annie -c cookies.txt https://www.bilibili.com/video/av20203945
```
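For illustration only (this is not annie's own parsing code), a small Go sketch of how the string form maps onto `http.Cookie` values attached to a request:
```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

// cookiesFromString splits "name=value; name2=value2" into http.Cookie values.
func cookiesFromString(s string) []*http.Cookie {
	var cookies []*http.Cookie
	for _, pair := range strings.Split(s, ";") {
		kv := strings.SplitN(strings.TrimSpace(pair), "=", 2)
		if len(kv) != 2 {
			continue
		}
		cookies = append(cookies, &http.Cookie{Name: kv[0], Value: kv[1]})
	}
	return cookies
}

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://www.bilibili.com/video/av20203945", nil)
	if err != nil {
		panic(err)
	}
	for _, c := range cookiesFromString("name=value; name2=value2") {
		req.AddCookie(c)
	}
	fmt.Println(req.Header.Get("Cookie")) // name=value; name2=value2
}
```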
### Proxy
You can set the HTTP/SOCKS5 proxy using environment variables:
```console
$ HTTP_PROXY="http://127.0.0.1:1087/" annie -i https://www.youtube.com/watch?v=Gnbch2osEeo
```
```console
$ HTTP_PROXY="socks5://127.0.0.1:1080/" annie -i https://www.youtube.com/watch?v=Gnbch2osEeo
```
### Multi-Thread
Use the `-n` option to set the number of download threads (default is 10; this only works for multi-part videos).
> **Special tip:** Using too many threads for **mgtv** downloads will cause HTTP 403 errors; we recommend setting the number of threads to **1**.
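The "threads" here are goroutines drawing from a bounded pool (annie itself uses a wait-group pool from its utils package, which appears later in this commit). A generic Go sketch of the pattern:
```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	parts := []string{"part0", "part1", "part2", "part3", "part4"}
	const n = 2 // like -n: at most two parts are downloaded at once

	sem := make(chan struct{}, n) // bounded concurrency
	var wg sync.WaitGroup
	for _, part := range parts {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot
		go func(part string) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot
			fmt.Println("downloading", part)
		}(part)
	}
	wg.Wait()
}
```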
### Short link
#### bilibili
You can use just the `av` or `ep` number to download a bilibili video:
```console
$ annie -i ep198381 av21877586
Site: 哔哩哔哩 bilibili.com
Title: 狐妖小红娘第79话 南国公主的吃货本色
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 485.23 MiB (508798478 Bytes)
# download with: annie -f default "URL"
Site: 哔哩哔哩 bilibili.com
Title: 【莓机会了】甜到虐哭的13集单集MAD「我现在什么都不想干,更不想看14集」
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 51.88 MiB (54403767 Bytes)
# download with: annie -f default "URL"
```
### Use specified Referrer
A Referrer can be used for the request with the `-r` option:
```console
$ annie -r https://www.bilibili.com/video/av20383055/ http://cn-scnc1-dx.acgvideo.com/...
...
```
### Specify the output path and name
The `-o` option sets the output path, and the `-O` option sets the name of the downloaded file:
```console
$ annie -o ../ -O "hello" https://...
```
### Debug Mode
The `-d` option outputs network request messages:
```console
$ annie -i -d http://www.bilibili.com/video/av20088587
URL: http://www.bilibili.com/video/av20088587
Method: GET
Headers: http.Header{
"Referer": {"http://www.bilibili.com/video/av20088587"},
"Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"},
"Accept-Charset": {"UTF-8,*;q=0.5"},
"Accept-Encoding": {"gzip,deflate,sdch"},
"Accept-Language": {"en-US,en;q=0.8"},
"User-Agent": {"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36"},
}
Status Code: 200
URL: https://interface.bilibili.com/v2/playurl?appkey=84956560bc028eb7&cid=32782944&otype=json&qn=116&quality=116&type=&sign=fb2e3f261fec398652f96d358517e535
Method: GET
Headers: http.Header{
"Accept-Charset": {"UTF-8,*;q=0.5"},
"Accept-Encoding": {"gzip,deflate,sdch"},
"Accept-Language": {"en-US,en;q=0.8"},
"User-Agent": {"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36"},
"Referer": {"https://interface.bilibili.com/v2/playurl?appkey=84956560bc028eb7&cid=32782944&otype=json&qn=116&quality=116&type=&sign=fb2e3f261fec398652f96d358517e535"},
"Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"},
}
Status Code: 200
Site: 哔哩哔哩 bilibili.com
Title: 燃油动力的遥控奥迪R8跑赛道
Type: video
Streams: # All available quality
[default] -------------------
Quality: 高清 1080P
Size: 64.38 MiB (67504795 Bytes)
# download with: annie -f default "URL"
```
### Reuse extracted data
The `-j` option prints the extracted data in JSON format; a sketch of consuming this output follows the example below.
```console
$ annie -j https://www.bilibili.com/video/av20203945
{
"site": "哔哩哔哩 bilibili.com",
"title": "【2018拜年祭单品】相遇day by day",
"type": "video",
"streams": {
"15": {
"urls": [
{
"url": "...",
"size": 18355205,
"ext": "flv"
}
],
"quality": "流畅 360P",
"size": 18355205
},
"32": {
"urls": [
{
"url": "...",
"size": 40058632,
"ext": "flv"
}
],
"quality": "清晰 480P",
"size": 40058632
},
"64": {
"urls": [
{
"url": "...",
"size": 82691087,
"ext": "flv"
}
],
"quality": "高清 720P",
"size": 82691087
},
"80": {
"urls": [
{
"url": "...",
"size": 121735559,
"ext": "flv"
}
],
"quality": "高清 1080P",
"size": 121735559
}
}
}
```
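One way to reuse this output from another Go program (a sketch; the struct fields below are inferred from the sample above, while annie's own `downloader` package defines the canonical types):
```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

type urlInfo struct {
	URL  string `json:"url"`
	Size int64  `json:"size"`
	Ext  string `json:"ext"`
}

type stream struct {
	URLs    []urlInfo `json:"urls"`
	Quality string    `json:"quality"`
	Size    int64     `json:"size"`
}

type videoData struct {
	Site    string            `json:"site"`
	Title   string            `json:"title"`
	Type    string            `json:"type"`
	Streams map[string]stream `json:"streams"`
}

func main() {
	// Usage: annie -j "URL" | go run reuse.go
	var v videoData
	if err := json.NewDecoder(os.Stdin).Decode(&v); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(v.Title)
	for name, s := range v.Streams {
		fmt.Printf("  [%s] %s, %d bytes\n", name, s.Quality, s.Size)
	}
}
```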
### Options
```
-i Information only
-F string
URLs file path
-d Debug mode
-j Print extracted data
-v Show version
```
#### Download:
```
-f string
Select specific stream to download
-p Download playlist
-n int
The number of download thread (only works for multiple-parts video) (default 10)
-c string
Cookie
-r string
Use specified Referrer
-cs int
HTTP chunk size for downloading (in MB) (default 0)
```
#### Network:
```
-retry int
How many times to retry when the download failed (default 10)
```
#### Playlist:
```
-start int
Playlist video to start at (default 1)
-end int
Playlist video to end at
-items string
Playlist video items to download. Separated by commas like: 1,5,6,8-10
```
#### Filesystem:
```
-o string
Specify the output path
-O string
Specify the output file name
```
#### Subtitle:
```
-C Download captions
```
#### Youku:
```
-ccode string
Youku ccode (default "0590")
-ckey string
Youku ckey (default "7B19C0AB12633B22E7FE81271162026020570708D6CC189E4924503C49D243A0DE6CD84A766832C2C99898FC5ED31F3709BB3CDD82C96492E721BDD381735026")
-password string
Youku password
```
#### aria2:
> Note: If you use aria2 to download, you need to merge the multi-part videos yourself. A sketch of the underlying RPC call follows the option list below.
```
-aria2
Use Aria2 RPC to download
-aria2addr string
Aria2 Address (default "localhost:6800")
-aria2method string
Aria2 Method (default "http")
-aria2token string
Aria2 RPC Token
```
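For reference, a minimal Go sketch of the `aria2.addUri` JSON-RPC call these options drive (the token, URL, output name, and endpoint below are placeholders; see aria2's RPC documentation for the full interface):
```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Placeholders: the token, URL, output name, and endpoint correspond to
	// -aria2token, the video URL, -O, and -aria2method/-aria2addr.
	payload := map[string]interface{}{
		"jsonrpc": "2.0",
		"id":      "annie",
		"method":  "aria2.addUri",
		"params": []interface{}{
			"token:YOUR_ARIA2_TOKEN",
			[]string{"https://example.com/video.mp4"},
			map[string]interface{}{
				"out":    "video[0].mp4",
				"header": []string{"Referer: https://example.com/"},
			},
		},
	}
	body, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}
	res, err := http.Post("http://localhost:6800/jsonrpc", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()
	fmt.Println(res.Status)
}
```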
## Supported Sites
Site | URL | 🎬 Videos | 🌁 Images | 📚 Playlist | 🍪 VIP adaptation
--- | --- | ---------| -------- | -------- | --------------
抖音 | <https://www.douyin.com> | ✓ | | | |
哔哩哔哩 | <https://www.bilibili.com> | ✓ | | ✓ | ✓ |
半次元 | <https://bcy.net> | | ✓ | | |
pixivision | <https://www.pixivision.net> | | ✓ | | |
优酷 | <https://www.youku.com> | ✓ | | | ✓ |
YouTube | <https://www.youtube.com> | ✓ | | ✓ | |
爱奇艺 | <https://www.iqiyi.com> | ✓ | | | |
芒果TV | <https://www.mgtv.com> | ✓ | | | |
糖豆广场舞 | <http://www.tangdou.com> | ✓ | | ✓ | |
Tumblr | <https://www.tumblr.com> | ✓ | ✓ | | |
Vimeo | <https://vimeo.com> | ✓ | | | |
Facebook | <https://facebook.com> | ✓ | | | |
斗鱼视频 | <https://v.douyu.com> | ✓ | | | |
秒拍 | <https://www.miaopai.com> | ✓ | | | |
微博 | <https://weibo.com> | ✓ | | | |
Instagram | <https://www.instagram.com> | ✓ | ✓ | | |
Twitter | <https://twitter.com> | ✓ | | | |
腾讯视频 | <https://v.qq.com> | ✓ | | | |
网易云音乐 | <https://music.163.com> | ✓ | | | |
音悦台 | <https://yinyuetai.com> | ✓ | | | |
极客时间 | <https://time.geekbang.org> | ✓ | | | |
Pornhub | <https://pornhub.com> | ✓ | | | |
XVIDEOS | <https://xvideos.com> | ✓ | | | |
聯合新聞網 | <https://udn.com> | ✓ | | | |
TikTok | <https://www.tiktok.com> | ✓ | | | |
## Known issues
### 优酷
Youku's `ccode` changes frequently, which can break annie. If you know a new working `ccode`, you can pass it directly with `annie -ccode ...` instead of waiting for an annie update (and of course, you are welcome to open a pull request to update the default `ccode`).
It is best to attach a logged-in cookie to every download to avoid problems with some `ccode` values.
## Contributing
Annie is an open-source project built on top of other open-source projects. If you are interested, you are welcome to contribute. Let's make Annie better, together. 💪
Check out the [Contributing Guide](./CONTRIBUTING.md) to get started.
Special thanks to [@Yasujizr](https://github.com/Yasujizr) who designed the amazing logo!
Thanks to [JetBrains](https://www.jetbrains.com/?from=annie) for the wonderful IDE.
<a href="https://www.jetbrains.com/?from=annie"><img src="static/jetbrains-variant-3.svg" /></a>
## Authors
Code with ❤️ by [iawia002](https://github.com/iawia002) and lovely [contributors](https://github.com/iawia002/annie/graphs/contributors)
## Similar projects
* [youtube-dl](https://github.com/rg3/youtube-dl)
* [you-get](https://github.com/soimort/you-get)
* [ytdl](https://github.com/rylio/ytdl)
## License
MIT
Copyright (c) 2018-present, iawia002

2
vendor/github.com/iawia002/annie/codecov.yml generated vendored Normal file
View File

@ -0,0 +1,2 @@
codecov:
token: e0f2d44f-c6a7-469a-a688-37c72c0f18f9

2
vendor/github.com/iawia002/annie/compress.bat generated vendored Normal file
View File

@ -0,0 +1,2 @@
:: Please install upx first, https://github.com/upx/upx/releases
for /f "delims=" %%i in ('dir /b /a-d /s "annie*"') do upx --best "%%i"

3
vendor/github.com/iawia002/annie/compress.sh generated vendored Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
# Please install upx first, https://github.com/upx/upx/releases
find ./ -xdev -maxdepth 1 -type f -iname 'annie*' -executable -exec upx --best --brute --ultra-brute {} \;

63
vendor/github.com/iawia002/annie/config/config.go generated vendored Normal file
View File

@ -0,0 +1,63 @@
package config
var (
// Debug debug mode
Debug bool
// Version show version
Version bool
// InfoOnly Information only mode
InfoOnly bool
// Cookie http cookies
Cookie string
// Playlist download playlist
Playlist bool
// Refer use specified Referrer
Refer string
// Stream select specified stream to download
Stream string
// OutputPath output file path
OutputPath string
// OutputName output file name
OutputName string
// ExtractedData print extracted data
ExtractedData bool
// ChunkSizeMB HTTP chunk size for downloading (in MB)
ChunkSizeMB int
// UseAria2RPC Use Aria2 RPC to download
UseAria2RPC bool
// Aria2Token Aria2 RPC Token
Aria2Token string
// Aria2Addr Aria2 Address (default "localhost:6800")
Aria2Addr string
// Aria2Method Aria2 Method (default "http")
Aria2Method string
// ThreadNumber The number of download thread (only works for multiple-parts video)
ThreadNumber int
// File URLs file path
File string
// ItemStart Define the starting item of a playlist or a file input
ItemStart int
// ItemEnd Define the ending item of a playlist or a file input
ItemEnd int
// Items Define wanted items from a file or playlist. Separated by commas like: 1,5,6,8-10
Items string
// Caption download captions
Caption bool
// YoukuCcode youku ccode
YoukuCcode string
// YoukuCkey youku ckey
YoukuCkey string
// YoukuPassword youku password
YoukuPassword string
// RetryTimes how many times to retry when the download failed
RetryTimes int
)
// FakeHeaders fake http headers
var FakeHeaders = map[string]string{
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Charset": "UTF-8,*;q=0.5",
"Accept-Encoding": "gzip,deflate,sdch",
"Accept-Language": "en-US,en;q=0.8",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
}

4
vendor/github.com/iawia002/annie/config/version.go generated vendored Normal file
View File

@ -0,0 +1,4 @@
package config
// VERSION version of annie
const VERSION = "0.9.8"

View File

@ -0,0 +1,321 @@
package downloader
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"sync"
"time"
"github.com/cheggaaa/pb"
"github.com/iawia002/annie/config"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
func progressBar(size int64) *pb.ProgressBar {
bar := pb.New64(size).SetUnits(pb.U_BYTES).SetRefreshRate(time.Millisecond * 10)
bar.ShowSpeed = true
bar.ShowFinalTime = true
bar.SetMaxWidth(1000)
return bar
}
// Caption downloads danmaku, subtitles, etc.
func Caption(url, refer, fileName, ext string) error {
if !config.Caption || config.InfoOnly {
return nil
}
fmt.Println("\nDownloading captions...")
body, err := request.GetByte(url, refer, nil)
if err != nil {
return err
}
filePath, err := utils.FilePath(fileName, ext, true)
if err != nil {
return err
}
file, fileError := os.Create(filePath)
if fileError != nil {
return fileError
}
defer file.Close()
if _, err = file.Write(body); err != nil {
return err
}
return nil
}
func writeFile(
url string, file *os.File, headers map[string]string, bar *pb.ProgressBar,
) (int64, error) {
res, err := request.Request(http.MethodGet, url, nil, headers)
if err != nil {
return 0, err
}
defer res.Body.Close()
writer := io.MultiWriter(file, bar)
// Note that io.Copy reads at most 32 KB from the input at a time and writes it
// to the output, then repeats, so memory usage stays bounded.
written, copyErr := io.Copy(writer, res.Body)
if copyErr != nil && copyErr != io.EOF {
return written, fmt.Errorf("file copy error: %s", copyErr)
}
return written, nil
}
// Save downloads the file described by urlData and saves it under fileName.
func Save(
urlData URL, refer, fileName string, bar *pb.ProgressBar, chunkSizeMB int,
) error {
var err error
filePath, err := utils.FilePath(fileName, urlData.Ext, false)
if err != nil {
return err
}
fileSize, exists, err := utils.FileSize(filePath)
if err != nil {
return err
}
if bar == nil {
bar = progressBar(urlData.Size)
bar.Start()
}
// Skip segment file
// TODO: Live video URLs will not return the size
if exists && fileSize == urlData.Size {
bar.Add64(fileSize)
return nil
}
tempFilePath := filePath + ".download"
tempFileSize, _, err := utils.FileSize(tempFilePath)
if err != nil {
return err
}
headers := map[string]string{
"Referer": refer,
}
var (
file *os.File
fileError error
)
if tempFileSize > 0 {
// Ranges start from 0; bytes=0-1023 means the first 1024 bytes of the file.
headers["Range"] = fmt.Sprintf("bytes=%d-", tempFileSize)
file, fileError = os.OpenFile(tempFilePath, os.O_APPEND|os.O_WRONLY, 0644)
bar.Add64(tempFileSize)
} else {
file, fileError = os.Create(tempFilePath)
}
if fileError != nil {
return fileError
}
// close and rename temp file at the end of this function
defer func() {
// must close the file before rename or it will cause
// `The process cannot access the file because it is being used by another process.` error.
file.Close()
if err == nil {
os.Rename(tempFilePath, filePath)
}
}()
if chunkSizeMB > 0 {
var start, end, chunkSize int64
chunkSize = int64(chunkSizeMB) * 1024 * 1024
remainingSize := urlData.Size
if tempFileSize > 0 {
start = tempFileSize
remainingSize -= tempFileSize
}
chunk := remainingSize / chunkSize
if remainingSize%chunkSize != 0 {
chunk++
}
var i int64 = 1
for ; i <= chunk; i++ {
end = start + chunkSize - 1
headers["Range"] = fmt.Sprintf("bytes=%d-%d", start, end)
temp := start
for i := 0; ; i++ {
written, err := writeFile(urlData.URL, file, headers, bar)
if err == nil {
break
} else if i+1 >= config.RetryTimes {
return err
}
temp += written
headers["Range"] = fmt.Sprintf("bytes=%d-%d", temp, end)
time.Sleep(1 * time.Second)
}
start = end + 1
}
} else {
temp := tempFileSize
for i := 0; ; i++ {
written, err := writeFile(urlData.URL, file, headers, bar)
if err == nil {
break
} else if i+1 >= config.RetryTimes {
return err
}
temp += written
headers["Range"] = fmt.Sprintf("bytes=%d-", temp)
time.Sleep(1 * time.Second)
}
}
return nil
}
// Download downloads the streams contained in the extracted data.
func Download(v Data, refer string, chunkSizeMB int) error {
v.genSortedStreams()
if config.ExtractedData {
jsonData, _ := json.MarshalIndent(v, "", " ")
fmt.Printf("%s\n", jsonData)
return nil
}
var (
title string
stream string
)
if config.OutputName == "" {
title = utils.FileName(v.Title, "")
} else {
title = utils.FileName(config.OutputName, "")
}
if config.Stream == "" {
stream = v.sortedStreams[0].name
} else {
stream = config.Stream
}
data, ok := v.Streams[stream]
if !ok {
return fmt.Errorf("no stream named %s", stream)
}
v.printInfo(stream) // if InfoOnly, this func will print all streams info
if config.InfoOnly {
return nil
}
// Use aria2 rpc to download
if config.UseAria2RPC {
rpcData := Aria2RPCData{
JSONRPC: "2.0",
ID: "annie", // can be modified
Method: "aria2.addUri",
}
rpcData.Params[0] = "token:" + config.Aria2Token
var urls []string
for _, p := range data.URLs {
urls = append(urls, p.URL)
}
var inputs Aria2Input
inputs.Header = append(inputs.Header, "Referer: "+refer)
for i := range urls {
rpcData.Params[1] = urls[i : i+1]
inputs.Out = fmt.Sprintf("%s[%d].%s", title, i, data.URLs[0].Ext)
rpcData.Params[2] = &inputs
jsonData, err := json.Marshal(rpcData)
if err != nil {
return err
}
reqURL := fmt.Sprintf("%s://%s/jsonrpc", config.Aria2Method, config.Aria2Addr)
req, err := http.NewRequest(http.MethodPost, reqURL, bytes.NewBuffer(jsonData))
if err != nil {
return err
}
req.Header.Set("Content-Type", "application/json")
var client = http.Client{Timeout: 30 * time.Second}
res, err := client.Do(req)
if err != nil {
return err
}
// The http Client and Transport guarantee that Body is always
// non-nil, even on responses without a body or responses with
// a zero-length body.
res.Body.Close()
}
return nil
}
// Skip the complete file that has been merged
mergedFilePath, err := utils.FilePath(title, "mp4", false)
if err != nil {
return err
}
_, mergedFileExists, err := utils.FileSize(mergedFilePath)
if err != nil {
return err
}
// After the merge, the file size has changed, so we do not check whether the size matches
if mergedFileExists {
fmt.Printf("%s: file already exists, skipping\n", mergedFilePath)
return nil
}
bar := progressBar(data.Size)
bar.Start()
if len(data.URLs) == 1 {
// only one fragment
err := Save(data.URLs[0], refer, title, bar, chunkSizeMB)
if err != nil {
return err
}
bar.Finish()
return nil
}
wgp := utils.NewWaitGroupPool(config.ThreadNumber)
// multiple fragments
errs := make([]error, 0)
lock := sync.Mutex{}
parts := make([]string, len(data.URLs))
for index, url := range data.URLs {
if len(errs) > 0 {
break
}
partFileName := fmt.Sprintf("%s[%d]", title, index)
partFilePath, err := utils.FilePath(partFileName, url.Ext, false)
if err != nil {
return err
}
parts[index] = partFilePath
wgp.Add()
go func(url URL, refer, fileName string, bar *pb.ProgressBar) {
defer wgp.Done()
err := Save(url, refer, fileName, bar, chunkSizeMB)
if err != nil {
lock.Lock()
errs = append(errs, err)
lock.Unlock()
}
}(url, refer, partFileName, bar)
}
wgp.Wait()
if len(errs) > 0 {
return errs[0]
}
bar.Finish()
if v.Type != "video" {
return nil
}
// merge
fmt.Printf("Merging video parts into %s\n", mergedFilePath)
if v.Site == "YouTube youtube.com" {
err = utils.MergeAudioAndVideo(parts, mergedFilePath)
} else {
err = utils.MergeToMP4(parts, mergedFilePath, title)
}
return err
}

140
vendor/github.com/iawia002/annie/downloader/types.go generated vendored Normal file
View File

@ -0,0 +1,140 @@
package downloader
import (
"fmt"
"sort"
"github.com/fatih/color"
"github.com/iawia002/annie/config"
)
// URL data struct for single URL information
type URL struct {
URL string `json:"url"`
Size int64 `json:"size"`
Ext string `json:"ext"`
}
// Stream data struct for each stream
type Stream struct {
// [URL: {URL, Size, Ext}, ...]
// Some video files have multiple fragments
// and support for downloading multiple image files at once
URLs []URL `json:"urls"`
Quality string `json:"quality"`
// total size of all urls
Size int64 `json:"size"`
// name used in sortedStreams
name string
}
// Data data struct for video information
type Data struct {
Site string `json:"site"`
Title string `json:"title"`
Type string `json:"type"`
// each stream has its own URLs and Quality
Streams map[string]Stream `json:"streams"`
sortedStreams []Stream
// Err is used to record whether an error occurred when extracting data.
// It is used to record the error information corresponding to each url when extracting the list data.
// NOTE(iawia002): err is only used in Data list
Err error `json:"-"`
// URL is used to record the address of this download
URL string `json:"url"`
}
// EmptyData returns an "empty" Data object with the given URL and error
func EmptyData(url string, err error) Data {
return Data{
URL: url,
Err: err,
}
}
func (data *Stream) calculateTotalSize() {
var size int64
for _, urlData := range data.URLs {
size += urlData.Size
}
data.Size = size
}
func (data Stream) printStream() {
blue := color.New(color.FgBlue)
cyan := color.New(color.FgCyan)
blue.Println(fmt.Sprintf(" [%s] -------------------", data.name))
if data.Quality != "" {
cyan.Printf(" Quality: ")
fmt.Println(data.Quality)
}
cyan.Printf(" Size: ")
if data.Size == 0 {
data.calculateTotalSize()
}
fmt.Printf("%.2f MiB (%d Bytes)\n", float64(data.Size)/(1024*1024), data.Size)
cyan.Printf(" # download with: ")
fmt.Printf("annie -f %s ...\n\n", data.name)
}
func (v *Data) genSortedStreams() {
for k, data := range v.Streams {
if data.Size == 0 {
data.calculateTotalSize()
}
data.name = k
v.Streams[k] = data
v.sortedStreams = append(v.sortedStreams, data)
}
if len(v.Streams) > 1 {
sort.Slice(
v.sortedStreams, func(i, j int) bool { return v.sortedStreams[i].Size > v.sortedStreams[j].Size },
)
}
}
func (v *Data) printInfo(stream string) {
cyan := color.New(color.FgCyan)
fmt.Println()
cyan.Printf(" Site: ")
fmt.Println(v.Site)
cyan.Printf(" Title: ")
fmt.Println(v.Title)
cyan.Printf(" Type: ")
fmt.Println(v.Type)
if config.InfoOnly {
cyan.Printf(" Streams: ")
fmt.Println("# All available quality")
for _, data := range v.sortedStreams {
data.printStream()
}
} else {
cyan.Printf(" Stream: ")
fmt.Println()
v.Streams[stream].printStream()
}
}
// Aria2RPCData json RPC 2.0 for Aria2
type Aria2RPCData struct {
// More info about RPC interface please refer to
// https://aria2.github.io/manual/en/html/aria2c.html#rpc-interface
JSONRPC string `json:"jsonrpc"`
ID string `json:"id"`
// For a simple download, only `addUri` is implemented
Method string `json:"method"`
// secret, uris, options
Params [3]interface{} `json:"params"`
}
// Aria2Input options for `aria2.addUri`
// https://aria2.github.io/manual/en/html/aria2c.html#id3
type Aria2Input struct {
// The file name of the downloaded file
Out string `json:"out"`
// For a simple download, only add headers
Header []string `json:"header"`
}

84
vendor/github.com/iawia002/annie/extractors/bcy/bcy.go generated vendored Normal file
View File

@ -0,0 +1,84 @@
package bcy
import (
"encoding/json"
"fmt"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type bcyData struct {
Detail struct {
PostData struct {
Multi []struct {
OriginalPath string `json:"original_path"`
} `json:"multi"`
} `json:"post_data"`
} `json:"detail"`
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
// parse json data
rep := strings.NewReplacer(`\"`, `"`, `\\`, `\`)
realURLs := utils.MatchOneOf(html, `JSON.parse\("(.+?)"\);`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
jsonString := rep.Replace(realURLs[1])
var data bcyData
if err = json.Unmarshal([]byte(jsonString), &data); err != nil {
return nil, fmt.Errorf("json unmarshal failed, err: %v", err)
}
doc, err := parser.GetDoc(html)
if err != nil {
return nil, err
}
title := strings.Replace(parser.Title(doc), " - 半次元 banciyuan - ACG爱好者社区", "", -1)
urls := make([]downloader.URL, 0, len(data.Detail.PostData.Multi))
var totalSize int64
for _, img := range data.Detail.PostData.Multi {
size, err := request.Size(img.OriginalPath, url)
if err != nil {
return nil, err
}
totalSize += size
_, ext, err := utils.GetNameAndExt(img.OriginalPath)
if err != nil {
return nil, err
}
urls = append(urls, downloader.URL{
URL: img.OriginalPath,
Size: size,
Ext: ext,
})
}
streams := map[string]downloader.Stream{
"default": {
URLs: urls,
Size: totalSize,
},
}
return []downloader.Data{
{
Site: "半次元 bcy.net",
Title: title,
Type: "image",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,349 @@
package bilibili
import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/iawia002/annie/config"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
const (
bilibiliAPI = "https://interface.bilibili.com/v2/playurl?"
bilibiliBangumiAPI = "https://bangumi.bilibili.com/player/web_api/v2/playurl?"
bilibiliTokenAPI = "https://api.bilibili.com/x/player/playurl/token?"
)
const (
// BiliBili blocks keys from time to time.
// You can extract from the Android client or bilibiliPlayer.min.js
appKey = "iVGUTjsxvpLeuDCf"
secKey = "aHRmhWMLkdeMuILqORnYZocwMBpMEOdt"
)
const referer = "https://www.bilibili.com"
var utoken string
func genAPI(aid, cid int, bangumi bool, quality string, seasonType string) (string, error) {
var (
err error
baseAPIURL string
params string
)
if config.Cookie != "" && utoken == "" {
utoken, err = request.Get(
fmt.Sprintf("%said=%d&cid=%d", bilibiliTokenAPI, aid, cid),
referer,
nil,
)
if err != nil {
return "", err
}
var t token
err = json.Unmarshal([]byte(utoken), &t)
if err != nil {
return "", err
}
if t.Code != 0 {
return "", fmt.Errorf("cookie error: %s", t.Message)
}
utoken = t.Data.Token
}
if bangumi {
// The parameters need to be sorted by name
// qn=0 flag makes the CDN address different every time
// quality=116(1080P 60) is the highest quality so far
params = fmt.Sprintf(
"appkey=%s&cid=%d&module=bangumi&otype=json&qn=%s&quality=%s&season_type=%s&type=",
appKey, cid, quality, quality, seasonType,
)
baseAPIURL = bilibiliBangumiAPI
} else {
params = fmt.Sprintf(
"appkey=%s&cid=%d&otype=json&qn=%s&quality=%s&type=",
appKey, cid, quality, quality,
)
baseAPIURL = bilibiliAPI
}
// For bangumi, utoken also needs to be included in the signed params; ordinary videos don't need it
api := fmt.Sprintf(
"%s%s&sign=%s", baseAPIURL, params, utils.Md5(params+secKey),
)
if !bangumi && utoken != "" {
api = fmt.Sprintf("%s&utoken=%s", api, utoken)
}
return api, nil
}
func genURL(durl []dURLData) ([]downloader.URL, int64) {
var size int64
urls := make([]downloader.URL, len(durl))
for index, data := range durl {
size += data.Size
urls[index] = downloader.URL{
URL: data.URL,
Size: data.Size,
Ext: "flv",
}
}
return urls, size
}
type bilibiliOptions struct {
url string
html string
bangumi bool
aid int
cid int
page int
subtitle string
}
func extractBangumi(url, html string) ([]downloader.Data, error) {
dataString := utils.MatchOneOf(html, `window.__INITIAL_STATE__=(.+?);\(function`)[1]
var data bangumiData
err := json.Unmarshal([]byte(dataString), &data)
if err != nil {
return nil, err
}
if !config.Playlist {
options := bilibiliOptions{
url: url,
html: html,
bangumi: true,
aid: data.EpInfo.Aid,
cid: data.EpInfo.Cid,
}
return []downloader.Data{bilibiliDownload(options)}, nil
}
// handle bangumi playlist
needDownloadItems := utils.NeedDownloadList(len(data.EpList))
extractedData := make([]downloader.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(config.ThreadNumber)
dataIndex := 0
for index, u := range data.EpList {
if !utils.ItemInSlice(index+1, needDownloadItems) {
continue
}
wgp.Add()
id := u.EpID
if id == 0 {
id = u.ID
}
// html content can't be reused here
options := bilibiliOptions{
url: fmt.Sprintf("https://www.bilibili.com/bangumi/play/ep%d", id),
bangumi: true,
aid: u.Aid,
cid: u.Cid,
}
go func(index int, options bilibiliOptions, extractedData []downloader.Data) {
defer wgp.Done()
extractedData[index] = bilibiliDownload(options)
}(dataIndex, options, extractedData)
dataIndex++
}
wgp.Wait()
return extractedData, nil
}
func getMultiPageData(html string) (*multiPage, error) {
var data multiPage
multiPageDataString := utils.MatchOneOf(
html, `window.__INITIAL_STATE__=(.+?);\(function`,
)
if multiPageDataString == nil {
return &data, errors.New("this page has no playlist")
}
err := json.Unmarshal([]byte(multiPageDataString[1]), &data)
if err != nil {
return nil, err
}
return &data, nil
}
func extractNormalVideo(url, html string) ([]downloader.Data, error) {
pageData, err := getMultiPageData(html)
if err != nil {
return nil, err
}
if !config.Playlist {
// handle URL that has a playlist, mainly for unified titles
// <h1> tag does not include subtitles
// bangumi doesn't need this
pageString := utils.MatchOneOf(url, `\?p=(\d+)`)
var p int
if pageString == nil {
// https://www.bilibili.com/video/av20827366/
p = 1
} else {
// https://www.bilibili.com/video/av20827366/?p=2
p, _ = strconv.Atoi(pageString[1])
}
if len(pageData.VideoData.Pages) < p || p < 1 {
return nil, extractors.ErrURLParseFailed
}
page := pageData.VideoData.Pages[p-1]
options := bilibiliOptions{
url: url,
html: html,
aid: pageData.Aid,
cid: page.Cid,
page: p,
}
// "part":"" or "part":"Untitled"
if page.Part == "Untitled" || len(pageData.VideoData.Pages) == 1 {
options.subtitle = ""
} else {
options.subtitle = page.Part
}
return []downloader.Data{bilibiliDownload(options)}, nil
}
// handle normal video playlist
// https://www.bilibili.com/video/av20827366/?p=1
needDownloadItems := utils.NeedDownloadList(len(pageData.VideoData.Pages))
extractedData := make([]downloader.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(config.ThreadNumber)
dataIndex := 0
for index, u := range pageData.VideoData.Pages {
if !utils.ItemInSlice(index+1, needDownloadItems) {
continue
}
wgp.Add()
options := bilibiliOptions{
url: url,
html: html,
aid: pageData.Aid,
cid: u.Cid,
subtitle: u.Part,
page: u.Page,
}
go func(index int, options bilibiliOptions, extractedData []downloader.Data) {
defer wgp.Done()
extractedData[index] = bilibiliDownload(options)
}(dataIndex, options, extractedData)
dataIndex++
}
wgp.Wait()
return extractedData, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var err error
html, err := request.Get(url, referer, nil)
if err != nil {
return nil, err
}
if strings.Contains(url, "bangumi") {
// handle bangumi
return extractBangumi(url, html)
}
// handle normal video
return extractNormalVideo(url, html)
}
// bilibiliDownload is the download function for a single URL
func bilibiliDownload(options bilibiliOptions) downloader.Data {
var (
err error
html string
seasonType string
)
if options.html != "" {
// reuse html string, but this can't be reused in case of playlist
html = options.html
} else {
html, err = request.Get(options.url, referer, nil)
if err != nil {
return downloader.EmptyData(options.url, err)
}
}
if options.bangumi {
seasonType = utils.MatchOneOf(html, `"season_type":(\d+)`, `"ssType":(\d+)`)[1]
}
// Get "accept_quality" and "accept_description"
// "accept_description":["高清 1080P","高清 720P","清晰 480P","流畅 360P"],
// "accept_quality":[80,48,32,16],
api, err := genAPI(options.aid, options.cid, options.bangumi, "15", seasonType)
if err != nil {
return downloader.EmptyData(options.url, err)
}
jsonString, err := request.Get(api, referer, nil)
if err != nil {
return downloader.EmptyData(options.url, err)
}
var quality qualityInfo
err = json.Unmarshal([]byte(jsonString), &quality)
if err != nil {
return downloader.EmptyData(options.url, err)
}
streams := make(map[string]downloader.Stream, len(quality.Quality))
for _, q := range quality.Quality {
apiURL, err := genAPI(options.aid, options.cid, options.bangumi, strconv.Itoa(q), seasonType)
if err != nil {
return downloader.EmptyData(options.url, err)
}
jsonString, err := request.Get(apiURL, referer, nil)
if err != nil {
return downloader.EmptyData(options.url, err)
}
var data bilibiliData
err = json.Unmarshal([]byte(jsonString), &data)
if err != nil {
return downloader.EmptyData(options.url, err)
}
// Avoid duplicate streams
if _, ok := streams[strconv.Itoa(data.Quality)]; ok {
continue
}
urls, size := genURL(data.DURL)
streams[strconv.Itoa(data.Quality)] = downloader.Stream{
URLs: urls,
Size: size,
Quality: qualityString[data.Quality],
}
}
// get the title
doc, err := parser.GetDoc(html)
if err != nil {
return downloader.EmptyData(options.url, err)
}
title := parser.Title(doc)
if options.subtitle != "" {
title = fmt.Sprintf("%s P%d %s", title, options.page, options.subtitle)
}
err = downloader.Caption(
fmt.Sprintf("https://comment.bilibili.com/%d.xml", options.cid),
options.url, title, "xml",
)
if err != nil {
return downloader.EmptyData(options.url, err)
}
return downloader.Data{
Site: "哔哩哔哩 bilibili.com",
Title: title,
Type: "video",
Streams: streams,
URL: options.url,
}
}

View File

@ -0,0 +1,70 @@
package bilibili
type qualityInfo struct {
Description []string `json:"accept_description"`
Quality []int `json:"accept_quality"`
}
type dURLData struct {
Size int64 `json:"size"`
URL string `json:"url"`
Order int `json:"order"`
}
type bilibiliData struct {
DURL []dURLData `json:"durl"`
Format string `json:"format"`
Quality int `json:"quality"`
}
// {"code":0,"message":"0","ttl":1,"data":{"token":"aaa"}}
// {"code":-101,"message":"账号未登录","ttl":1}
type tokenData struct {
Token string `json:"token"`
}
type token struct {
Code int `json:"code"`
Message string `json:"message"`
Data tokenData `json:"data"`
}
type bangumiEpData struct {
Aid int `json:"aid"`
Cid int `json:"cid"`
ID int `json:"id"`
EpID int `json:"ep_id"`
}
type bangumiData struct {
EpInfo bangumiEpData `json:"epInfo"`
EpList []bangumiEpData `json:"epList"`
}
type videoPagesData struct {
Cid int `json:"cid"`
Part string `json:"part"`
Page int `json:"page"`
}
type multiPageVideoData struct {
Title string `json:"title"`
Pages []videoPagesData `json:"pages"`
}
type multiPage struct {
Aid int `json:"aid"`
VideoData multiPageVideoData `json:"videoData"`
}
var qualityString = map[int]string{
116: "高清 1080P60",
74: "高清 720P60",
112: "高清 1080P+",
80: "高清 1080P",
64: "高清 720P",
48: "高清 720P",
32: "清晰 480P",
16: "流畅 360P",
15: "流畅 360P",
}

8
vendor/github.com/iawia002/annie/extractors/defs.go generated vendored Normal file
View File

@ -0,0 +1,8 @@
package extractors
import (
"errors"
)
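// Sentinel errors shared by the individual site extractors; callers can compare
// a returned error against these values directly.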
var ErrURLParseFailed = errors.New("url parse failed")
var ErrLoginRequired = errors.New("login required")

View File

@ -0,0 +1,54 @@
package douyin
import (
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var err error
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
var title string
desc := utils.MatchOneOf(html, `<p class="desc">(.+?)</p>`)
if desc != nil {
title = desc[1]
} else {
title = "抖音短视频"
}
realURLs := utils.MatchOneOf(html, `playAddr: "(.+?)"`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
realURL := realURLs[1]
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
return []downloader.Data{
{
Site: "抖音 douyin.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,110 @@
package douyu
import (
"encoding/json"
"errors"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type douyuData struct {
Error int `json:"error"`
Data struct {
VideoURL string `json:"video_url"`
} `json:"data"`
}
type douyuURLInfo struct {
URL string
Size int64
}
func douyuM3u8(url string) ([]douyuURLInfo, int64, error) {
var (
data []douyuURLInfo
temp douyuURLInfo
size, totalSize int64
err error
)
urls, err := utils.M3u8URLs(url)
if err != nil {
return nil, 0, err
}
for _, u := range urls {
size, err = request.Size(u, url)
if err != nil {
return nil, 0, err
}
totalSize += size
temp = douyuURLInfo{
URL: u,
Size: size,
}
data = append(data, temp)
}
return data, totalSize, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var err error
liveVid := utils.MatchOneOf(url, `https?://www.douyu.com/(\S+)`)
if liveVid != nil {
return nil, errors.New("暂不支持斗鱼直播")
}
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
titles := utils.MatchOneOf(html, `<title>(.*?)</title>`)
if titles == nil || len(titles) < 2 {
return nil, extractors.ErrURLParseFailed
}
title := titles[1]
vids := utils.MatchOneOf(url, `https?://v.douyu.com/show/(\S+)`)
if vids == nil || len(vids) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid := vids[1]
dataString, err := request.Get("http://vmobile.douyu.com/video/getInfo?vid="+vid, url, nil)
if err != nil {
return nil, err
}
var dataDict douyuData
if err = json.Unmarshal([]byte(dataString), &dataDict); err != nil {
return nil, err
}
m3u8URLs, totalSize, err := douyuM3u8(dataDict.Data.VideoURL)
if err != nil {
return nil, err
}
urls := make([]downloader.URL, len(m3u8URLs))
for index, u := range m3u8URLs {
urls[index] = downloader.URL{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
streams := map[string]downloader.Stream{
"default": {
URLs: urls,
Size: totalSize,
},
}
return []downloader.Data{
{
Site: "斗鱼 douyu.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,60 @@
package facebook
import (
"fmt"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var err error
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
titles := utils.MatchOneOf(html, `<title id="pageTitle">(.+)</title>`)
if titles == nil || len(titles) < 2 {
return nil, extractors.ErrURLParseFailed
}
title := titles[1]
streams := map[string]downloader.Stream{}
for _, quality := range []string{"sd", "hd"} {
srcElement := utils.MatchOneOf(
html, fmt.Sprintf(`%s_src_no_ratelimit:"(.+?)"`, quality),
)
if srcElement == nil || len(srcElement) < 2 {
continue
}
u := srcElement[1]
size, err := request.Size(u, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: u,
Size: size,
Ext: "mp4",
}
streams[quality] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
Quality: quality,
}
}
return []downloader.Data{
{
Site: "Facebook facebook.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,114 @@
package geekbang
import (
"encoding/json"
"errors"
"net/http"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type geekData struct {
Code int `json:"code"`
Error json.RawMessage `json:"error"`
Data struct {
Title string `json:"article_sharetitle"`
VideoMediaMap map[string]struct {
URL string `json:"url"`
Size int64 `json:"size"`
} `json:"video_media_map"`
} `json:"data"`
}
type geekURLInfo struct {
URL string
Size int64
}
func geekM3u8(url string) ([]geekURLInfo, error) {
var (
data []geekURLInfo
temp geekURLInfo
size int64
err error
)
urls, err := utils.M3u8URLs(url)
if err != nil {
return nil, err
}
for _, u := range urls {
temp = geekURLInfo{
URL: u,
Size: size,
}
data = append(data, temp)
}
return data, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var err error
matches := utils.MatchOneOf(url, `https?://time.geekbang.org/course/detail/(\d+)-(\d+)`)
if matches == nil || len(matches) < 3 {
return nil, extractors.ErrURLParseFailed
}
headers := map[string]string{"Origin": "https://time.geekbang.org", "Content-Type": "application/json", "Referer": url}
params := strings.NewReader("{\"id\":" + matches[2] + "}")
res, err := request.Request(http.MethodPost, "https://time.geekbang.org/serv/v1/article", params, headers)
if err != nil {
return nil, err
}
defer res.Body.Close()
var data geekData
if err = json.NewDecoder(res.Body).Decode(&data); err != nil {
return nil, err
}
if data.Code < 0 {
return nil, errors.New(string(data.Error))
}
title := data.Data.Title
streams := make(map[string]downloader.Stream, len(data.Data.VideoMediaMap))
for key, media := range data.Data.VideoMediaMap {
m3u8URLs, err := geekM3u8(media.URL)
if err != nil {
return nil, err
}
urls := make([]downloader.URL, len(m3u8URLs))
for index, u := range m3u8URLs {
urls[index] = downloader.URL{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
streams[key] = downloader.Stream{
URLs: urls,
Size: media.Size,
Quality: key,
}
}
return []downloader.Data{
{
Site: "极客时间 geekbang.org",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,133 @@
package instagram
import (
"encoding/json"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type instagram struct {
EntryData struct {
PostPage []struct {
Graphql struct {
ShortcodeMedia struct {
DisplayURL string `json:"display_url"`
VideoURL string `json:"video_url"`
EdgeSidecar struct {
Edges []struct {
Node struct {
DisplayURL string `json:"display_url"`
} `json:"node"`
} `json:"edges"`
} `json:"edge_sidecar_to_children"`
} `json:"shortcode_media"`
} `json:"graphql"`
} `json:"PostPage"`
} `json:"entry_data"`
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
// get the title
doc, err := parser.GetDoc(html)
if err != nil {
return nil, err
}
title := parser.Title(doc)
dataStrings := utils.MatchOneOf(html, `window\._sharedData\s*=\s*(.*);`)
if dataStrings == nil || len(dataStrings) < 2 {
return nil, extractors.ErrURLParseFailed
}
dataString := dataStrings[1]
var data instagram
if err = json.Unmarshal([]byte(dataString), &data); err != nil {
return nil, extractors.ErrURLParseFailed
}
var realURL, dataType string
var size int64
streams := map[string]downloader.Stream{}
if data.EntryData.PostPage[0].Graphql.ShortcodeMedia.VideoURL != "" {
// Video
dataType = "video"
realURL = data.EntryData.PostPage[0].Graphql.ShortcodeMedia.VideoURL
size, err = request.Size(realURL, url)
if err != nil {
return nil, err
}
streams["default"] = downloader.Stream{
URLs: []downloader.URL{
{
URL: realURL,
Size: size,
Ext: "mp4",
},
},
Size: size,
}
} else {
// Image
dataType = "image"
if data.EntryData.PostPage[0].Graphql.ShortcodeMedia.EdgeSidecar.Edges == nil {
// Single
realURL = data.EntryData.PostPage[0].Graphql.ShortcodeMedia.DisplayURL
size, err = request.Size(realURL, url)
if err != nil {
return nil, err
}
streams["default"] = downloader.Stream{
URLs: []downloader.URL{
{
URL: realURL,
Size: size,
Ext: "jpg",
},
},
Size: size,
}
} else {
// Album
var totalSize int64
var urls []downloader.URL
for _, u := range data.EntryData.PostPage[0].Graphql.ShortcodeMedia.EdgeSidecar.Edges {
realURL = u.Node.DisplayURL
size, err = request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "jpg",
}
urls = append(urls, urlData)
totalSize += size
}
streams["default"] = downloader.Stream{
URLs: urls,
Size: totalSize,
}
}
}
return []downloader.Data{
{
Site: "Instagram instagram.com",
Title: title,
Type: dataType,
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,197 @@
package iqiyi
import (
"encoding/json"
"fmt"
"math/rand"
"strconv"
"strings"
"time"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type iqiyi struct {
Code string `json:"code"`
Data struct {
VP struct {
Du string `json:"du"`
Tkl []struct {
Vs []struct {
Bid int `json:"bid"`
Scrsz string `json:"scrsz"`
Vsize int64 `json:"vsize"`
Fs []struct {
L string `json:"l"`
B int64 `json:"b"`
} `json:"fs"`
} `json:"vs"`
} `json:"tkl"`
} `json:"vp"`
} `json:"data"`
Msg string `json:"msg"`
}
type iqiyiURL struct {
L string `json:"l"`
}
const iqiyiReferer = "https://www.iqiyi.com"
func getMacID() string {
var macID string
chars := []string{
"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "n", "m", "o", "p", "q", "r", "s", "t", "u", "v",
"w", "x", "y", "z", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
}
size := len(chars)
for i := 0; i < 32; i++ {
macID += chars[rand.Intn(size)]
}
return macID
}
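// getVF signs the /vps query string: it appends a deterministic 32-character
// suffix computed from the 13*(66*k+27*j)%35 schedule below and returns the MD5
// of the whole string, presumably mirroring the signing code in the iqiyi web player.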
func getVF(params string) string {
var suffix string
for j := 0; j < 8; j++ {
for k := 0; k < 4; k++ {
var v8 int
v4 := 13 * (66*k + 27*j) % 35
if v4 >= 10 {
v8 = v4 + 88
} else {
v8 = v4 + 49
}
suffix += string(v8) // string(97) -> "a"
}
}
params += suffix
return utils.Md5(params)
}
func getVPS(tvid, vid string) (iqiyi, error) {
t := time.Now().Unix() * 1000
host := "http://cache.video.qiyi.com"
params := fmt.Sprintf(
"/vps?tvid=%s&vid=%s&v=0&qypid=%s_12&src=01012001010000000000&t=%d&k_tag=1&k_uid=%s&rs=1",
tvid, vid, tvid, t, getMacID(),
)
vf := getVF(params)
apiURL := fmt.Sprintf("%s%s&vf=%s", host, params, vf)
info, err := request.Get(apiURL, iqiyiReferer, nil)
if err != nil {
return iqiyi{}, err
}
var data iqiyi
if err = json.Unmarshal([]byte(info), &data); err != nil {
return iqiyi{}, err
}
return data, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, iqiyiReferer, nil)
if err != nil {
return nil, err
}
tvid := utils.MatchOneOf(
url,
`#curid=(.+)_`,
`tvid=([^&]+)`,
)
if tvid == nil {
tvid = utils.MatchOneOf(
html,
`data-player-tvid="([^"]+)"`,
`param\['tvid'\]\s*=\s*"(.+?)"`,
`"tvid":"(\d+)"`,
)
}
if tvid == nil || len(tvid) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid := utils.MatchOneOf(
url,
`#curid=.+_(.*)$`,
`vid=([^&]+)`,
)
if vid == nil {
vid = utils.MatchOneOf(
html,
`data-player-videoid="([^"]+)"`,
`param\['vid'\]\s*=\s*"(.+?)"`,
`"vid":"(\w+)"`,
)
}
if vid == nil || len(vid) < 2 {
return nil, extractors.ErrURLParseFailed
}
doc, err := parser.GetDoc(html)
if err != nil {
return nil, err
}
title := strings.TrimSpace(doc.Find("h1>a").First().Text())
var sub string
for _, k := range []string{"span", "em"} {
if sub != "" {
break
}
sub = strings.TrimSpace(doc.Find("h1>" + k).First().Text())
}
title += sub
if title == "" {
title = doc.Find("title").Text()
}
videoDatas, err := getVPS(tvid[1], vid[1])
if err != nil {
return nil, err
}
if videoDatas.Code != "A00000" {
return nil, fmt.Errorf("can't play this video: %s", videoDatas.Msg)
}
streams := map[string]downloader.Stream{}
urlPrefix := videoDatas.Data.VP.Du
for _, video := range videoDatas.Data.VP.Tkl[0].Vs {
urls := make([]downloader.URL, len(video.Fs))
for index, v := range video.Fs {
realURLData, err := request.Get(urlPrefix+v.L, iqiyiReferer, nil)
if err != nil {
return nil, err
}
var realURL iqiyiURL
if err = json.Unmarshal([]byte(realURLData), &realURL); err != nil {
return nil, err
}
_, ext, err := utils.GetNameAndExt(realURL.L)
if err != nil {
return nil, err
}
urls[index] = downloader.URL{
URL: realURL.L,
Size: v.B,
Ext: ext,
}
}
streams[strconv.Itoa(video.Bid)] = downloader.Stream{
URLs: urls,
Size: video.Vsize,
Quality: video.Scrsz,
}
}
return []downloader.Data{
{
Site: "爱奇艺 iqiyi.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,206 @@
package mgtv
import (
"encoding/base64"
"encoding/json"
"fmt"
"regexp"
"strconv"
"strings"
"time"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type mgtvVideoStream struct {
Name string `json:"name"`
URL string `json:"url"`
Def string `json:"def"`
}
type mgtvVideoInfo struct {
Title string `json:"title"`
Desc string `json:"desc"`
}
type mgtvVideoData struct {
Stream []mgtvVideoStream `json:"stream"`
StreamDomain []string `json:"stream_domain"`
Info mgtvVideoInfo `json:"info"`
}
type mgtv struct {
Data mgtvVideoData `json:"data"`
}
type mgtvVideoAddr struct {
Info string `json:"info"`
}
type mgtvURLInfo struct {
URL string
Size int64
}
type mgtvPm2Data struct {
Data struct {
Atc struct {
Pm2 string `json:"pm2"`
} `json:"atc"`
Info mgtvVideoInfo `json:"info"`
} `json:"data"`
}
func mgtvM3u8(url string) ([]mgtvURLInfo, int64, error) {
var data []mgtvURLInfo
var temp mgtvURLInfo
var size, totalSize int64
urls, err := utils.M3u8URLs(url)
if err != nil {
return nil, 0, err
}
m3u8String, err := request.Get(url, url, nil)
if err != nil {
return nil, 0, err
}
sizes := utils.MatchAll(m3u8String, `#EXT-MGTV-File-SIZE:(\d+)`)
// sizes: [[#EXT-MGTV-File-SIZE:1893724, 1893724]]
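// Each segment URL in the playlist is assumed to carry its own
// #EXT-MGTV-File-SIZE tag, so sizes[index] lines up with urls[index].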
for index, u := range urls {
size, err = strconv.ParseInt(sizes[index][1], 10, 64)
if err != nil {
return nil, 0, err
}
totalSize += size
temp = mgtvURLInfo{
URL: u,
Size: size,
}
data = append(data, temp)
}
return data, totalSize, nil
}
func encodeTk2(str string) string {
// base64 encode, then apply the player's URL-safe substitutions
// ("+" -> "_", "/" -> "~", "=" -> "-") and reverse the result.
encodeString := base64.StdEncoding.EncodeToString([]byte(str))
encodeString = regexp.MustCompile(`\+`).ReplaceAllString(encodeString, "_")
encodeString = regexp.MustCompile(`/`).ReplaceAllString(encodeString, "~")
encodeString = regexp.MustCompile(`=`).ReplaceAllString(encodeString, "-")
return utils.Reverse(encodeString)
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
vid := utils.MatchOneOf(
url,
`https?://www.mgtv.com/(?:b|l)/\d+/(\d+).html`,
`https?://www.mgtv.com/hz/bdpz/\d+/(\d+).html`,
)
if vid == nil {
vid = utils.MatchOneOf(html, `vid: (\d+),`)
}
if vid == nil || len(vid) < 2 {
return nil, extractors.ErrURLParseFailed
}
// API flow taken from https://js.mgtv.com/imgotv-miniv6/global/page/play-tv.js
// (the getSource and getPlayInfo functions), observed via the Chrome Network/JS panels
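// The handshake is two requests: /player/video returns the "pm2" token (plus
// title and description), and /player/getSource, called with that token,
// returns the per-definition stream list and the stream domains.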
headers := map[string]string{
"Cookie": "PM_CHKID=1",
}
clit := fmt.Sprintf("clit=%d", time.Now().Unix()/1000)
pm2DataString, err := request.Get(
fmt.Sprintf(
"https://pcweb.api.mgtv.com/player/video?video_id=%s&tk2=%s",
vid[1],
encodeTk2(fmt.Sprintf(
"did=f11dee65-4e0d-4d25-bfce-719ad9dc991d|pno=1030|ver=5.5.1|%s", clit,
)),
),
url,
headers,
)
if err != nil {
return nil, err
}
var pm2 mgtvPm2Data
if err = json.Unmarshal([]byte(pm2DataString), &pm2); err != nil {
return nil, err
}
dataString, err := request.Get(
fmt.Sprintf(
"https://pcweb.api.mgtv.com/player/getSource?video_id=%s&tk2=%s&pm2=%s",
vid[1], encodeTk2(clit), pm2.Data.Atc.Pm2,
),
url,
headers,
)
if err != nil {
return nil, err
}
var mgtvData mgtv
if err = json.Unmarshal([]byte(dataString), &mgtvData); err != nil {
return nil, err
}
title := strings.TrimSpace(
pm2.Data.Info.Title + " " + pm2.Data.Info.Desc,
)
mgtvStreams := mgtvData.Data.Stream
var addr mgtvVideoAddr
streams := map[string]downloader.Stream{}
for _, stream := range mgtvStreams {
if stream.URL == "" {
continue
}
// real download address
addr = mgtvVideoAddr{}
addrInfo, err := request.GetByte(mgtvData.Data.StreamDomain[0]+stream.URL, url, headers)
if err != nil {
return nil, err
}
if err = json.Unmarshal(addrInfo, &addr); err != nil {
return nil, err
}
m3u8URLs, totalSize, err := mgtvM3u8(addr.Info)
if err != nil {
return nil, err
}
urls := make([]downloader.URL, len(m3u8URLs))
for index, u := range m3u8URLs {
urls[index] = downloader.URL{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
streams[stream.Def] = downloader.Stream{
URLs: urls,
Size: totalSize,
Quality: stream.Name,
}
}
return []downloader.Data{
{
Site: "芒果TV mgtv.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,87 @@
package miaopai
import (
"encoding/json"
"fmt"
"math/rand"
"strings"
"time"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type miaopaiData struct {
Data struct {
Description string `json:"description"`
MetaData []struct {
URLs struct {
M string `json:"m"`
} `json:"play_urls"`
} `json:"meta_data"`
} `json:"data"`
}
func getRandomString(l int) string {
rand.Seed(time.Now().UnixNano())
s := make([]string, 0)
chars := []string{
"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "n", "m", "o", "p", "q", "r", "s", "t", "u", "v",
"w", "x", "y", "z", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
}
for i := 0; i < l; i++ {
s = append(s, chars[rand.Intn(len(chars))])
}
return strings.Join(s, "")
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
ids := utils.MatchOneOf(url, `/media/([^\./]+)`, `/show(?:/channel)?/([^\./]+)`)
if ids == nil || len(ids) < 2 {
return nil, extractors.ErrURLParseFailed
}
id := ids[1]
var data miaopaiData
jsonString, err := request.Get(
fmt.Sprintf("https://n.miaopai.com/api/aj_media/info.json?smid=%s&appid=530&_cb=_jsonp%s", id, getRandomString(10)),
url, nil,
)
if err != nil {
return nil, err
}
err = json.Unmarshal([]byte(jsonString), &data)
if err != nil {
return nil, err
}
realURL := data.Data.MetaData[0].URLs.M
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
return []downloader.Data{
{
Site: "秒拍 miaopai.com",
Title: data.Data.Description,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,66 @@
package netease
import (
"errors"
netURL "net/url"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
url = strings.Replace(url, "/#/", "/", 1)
vid := utils.MatchOneOf(url, `/(mv|video)\?id=(\w+)`)
if vid == nil {
return nil, errors.New("invalid url for netease music")
}
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
if strings.Contains(html, "u-errlg-404") {
return nil, errors.New("404 music not found")
}
titles := utils.MatchOneOf(html, `<meta property="og:title" content="(.+?)" />`)
if titles == nil || len(titles) < 2 {
return nil, extractors.ErrURLParseFailed
}
title := titles[1]
realURLs := utils.MatchOneOf(html, `<meta property="og:video" content="(.+?)" />`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
realURL, _ := netURL.QueryUnescape(realURLs[1])
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
return []downloader.Data{
{
Site: "网易云音乐 music.163.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,35 @@
package pixivision
import (
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
)
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
title, urls, err := parser.GetImages(url, html, "am__work__illust ", nil)
if err != nil {
return nil, err
}
streams := map[string]downloader.Stream{
"default": {
URLs: urls,
Size: 0,
},
}
return []downloader.Data{
{
Site: "pixivision pixivision.net",
Title: title,
Type: "image",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,91 @@
package pornhub
import (
"bytes"
"encoding/json"
"fmt"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type pornhubData struct {
Format string `json:"format"`
Quality json.RawMessage `json:"quality"`
VideoURL string `json:"videoUrl"`
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
var title string
desc := utils.MatchOneOf(html, `<span class="inlineFree">(.+?)</span>`)
if desc != nil && len(desc) > 1 {
title = desc[1]
} else {
title = "pornhub video"
}
realURLs := utils.MatchOneOf(html, `"mediaDefinitions":(.+?),"isVertical"`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
var pornhubs []pornhubData
if err = json.Unmarshal([]byte(realURLs[1]), &pornhubs); err != nil {
return nil, err
}
streams := make(map[string]downloader.Stream, len(pornhubs))
for _, data := range pornhubs {
if data.Format == "hls" {
continue
}
if bytes.ContainsRune(data.Quality, '[') {
// skip the case where the quality value is an array
// "quality": [
// 720,
// 480,
// 240
// ]
continue
}
quality := string(data.Quality)
realURL := data.VideoURL
if len(realURL) == 0 {
continue
}
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
streams[quality] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
Quality: fmt.Sprintf("%sP", quality),
}
}
return []downloader.Data{
{
Site: "Pornhub pornhub.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

206
vendor/github.com/iawia002/annie/extractors/qq/qq.go generated vendored Normal file
View File

@ -0,0 +1,206 @@
package qq
import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type qqVideoInfo struct {
Fl struct {
Fi []struct {
ID int `json:"id"`
Name string `json:"name"`
Cname string `json:"cname"`
Fs int `json:"fs"`
} `json:"fi"`
} `json:"fl"`
Vl struct {
Vi []struct {
Fn string `json:"fn"`
Ti string `json:"ti"`
Fvkey string `json:"fvkey"`
Cl struct {
Fc int `json:"fc"`
Ci []struct {
Idx int `json:"idx"`
} `json:"ci"`
} `json:"cl"`
Ul struct {
UI []struct {
URL string `json:"url"`
} `json:"ui"`
} `json:"ul"`
} `json:"vi"`
} `json:"vl"`
Msg string `json:"msg"`
}
type qqKeyInfo struct {
Key string `json:"key"`
}
const qqPlayerVersion string = "3.2.19.333"
func genStreams(vid, cdn string, data qqVideoInfo) (map[string]downloader.Stream, error) {
streams := map[string]downloader.Stream{}
var vkey string
// number of fragments
clips := data.Vl.Vi[0].Cl.Fc
if clips == 0 {
clips = 1
}
for _, fi := range data.Fl.Fi {
var fmtIDPrefix string
fns := strings.Split(data.Vl.Vi[0].Fn, ".")
if fi.ID > 100000 {
fmtIDPrefix = "m"
} else if fi.ID > 10000 {
fmtIDPrefix = "p"
}
if fmtIDPrefix != "" {
fmtIDName := fmt.Sprintf("%s%d", fmtIDPrefix, fi.ID%10000)
if len(fns) < 3 {
// v0739eolv38.mp4 -> v0739eolv38.m701.mp4
fns = append(fns[:1], append([]string{fmtIDName}, fns[1:]...)...)
} else {
// n0687peq62x.p709.mp4 -> n0687peq62x.m709.mp4
fns[1] = fmtIDName
}
} else if len(fns) >= 3 {
// delete ID part
// e0765r4mwcr.2.mp4 -> e0765r4mwcr.mp4
fns = append(fns[:1], fns[2:]...)
}
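// At this point fns holds the dot-separated pieces of the per-quality filename
// (e.g. ["n0687peq62x", "m709", "mp4"]); for multi-fragment streams the loop
// below still splices a part number into it.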
var urls []downloader.URL
var totalSize int64
var filename string
for part := 1; part < clips+1; part++ {
// Multiple fragments per stream
if fmtIDPrefix == "p" {
if len(fns) < 4 {
// For multi-fragment streams the part number has to be spliced into the filename
// n0687peq62x.p709.mp4 -> n0687peq62x.p709.1.mp4
fns = append(fns[:2], append([]string{strconv.Itoa(part)}, fns[2:]...)...)
} else {
fns[2] = strconv.Itoa(part)
}
}
filename = strings.Join(fns, ".")
html, err := request.Get(
fmt.Sprintf(
"http://vv.video.qq.com/getkey?otype=json&platform=11&appver=%s&filename=%s&format=%d&vid=%s",
qqPlayerVersion, filename, fi.ID, vid,
), cdn, nil,
)
if err != nil {
return nil, err
}
jsonStrings := utils.MatchOneOf(html, `QZOutputJson=(.+);$`)
if jsonStrings == nil || len(jsonStrings) < 2 {
return nil, extractors.ErrURLParseFailed
}
jsonString := jsonStrings[1]
var keyData qqKeyInfo
if err = json.Unmarshal([]byte(jsonString), &keyData); err != nil {
return nil, err
}
vkey = keyData.Key
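// getkey may return an empty key; fall back to the fvkey carried in the
// original getinfo response.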
if vkey == "" {
vkey = data.Vl.Vi[0].Fvkey
}
realURL := fmt.Sprintf("%s%s?vkey=%s", cdn, filename, vkey)
size, err := request.Size(realURL, cdn)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
urls = append(urls, urlData)
totalSize += size
}
streams[fi.Name] = downloader.Stream{
URLs: urls,
Size: totalSize,
Quality: fi.Cname,
}
}
return streams, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
vids := utils.MatchOneOf(url, `vid=(\w+)`, `/(\w+)\.html`)
if vids == nil || len(vids) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid := vids[1]
if len(vid) != 11 {
u, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
vids = utils.MatchOneOf(
u, `vid=(\w+)`, `vid:\s*["'](\w+)`, `vid\s*=\s*["']\s*(\w+)`,
)
if vids == nil || len(vids) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid = vids[1]
}
html, err := request.Get(
fmt.Sprintf(
"http://vv.video.qq.com/getinfo?otype=json&platform=11&defnpayver=1&appver=%s&defn=shd&vid=%s",
qqPlayerVersion, vid,
), url, nil,
)
if err != nil {
return nil, err
}
jsonStrings := utils.MatchOneOf(html, `QZOutputJson=(.+);$`)
if jsonStrings == nil || len(jsonStrings) < 2 {
return nil, extractors.ErrURLParseFailed
}
jsonString := jsonStrings[1]
var data qqVideoInfo
if err = json.Unmarshal([]byte(jsonString), &data); err != nil {
return nil, err
}
// API request error
if data.Msg != "" {
return nil, errors.New(data.Msg)
}
cdn := data.Vl.Vi[0].Ul.UI[len(data.Vl.Vi[0].Ul.UI)-1].URL
streams, err := genStreams(vid, cdn, data)
if err != nil {
return nil, err
}
return []downloader.Data{
{
Site: "腾讯视频 v.qq.com",
Title: data.Vl.Vi[0].Ti,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,114 @@
package tangdou
import (
"github.com/iawia002/annie/config"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
const referer = "http://www.tangdou.com/html/playlist/view/4173"
// Extract is the main function for extracting data
func Extract(uri string) ([]downloader.Data, error) {
if !config.Playlist {
return []downloader.Data{tangdouDownload(uri)}, nil
}
html, err := request.Get(uri, referer, nil)
if err != nil {
return nil, err
}
videoIDs := utils.MatchAll(html, `<a target="tdplayer" href="(.+?)" class="title">`)
needDownloadItems := utils.NeedDownloadList(len(videoIDs))
extractedData := make([]downloader.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(config.ThreadNumber)
dataIndex := 0
for index, videoID := range videoIDs {
if !utils.ItemInSlice(index+1, needDownloadItems) || len(videoID) < 2 {
continue
}
wgp.Add()
go func(index int, videoURI string, extractedData []downloader.Data) {
defer wgp.Done()
extractedData[index] = tangdouDownload(videoURI)
}(dataIndex, videoID[1], extractedData)
dataIndex++
}
wgp.Wait()
return extractedData, nil
}
// tangdouDownload is the download function for a single URL
func tangdouDownload(uri string) downloader.Data {
html, err := request.Get(uri, referer, nil)
if err != nil {
return downloader.EmptyData(uri, err)
}
titles := utils.MatchOneOf(
html, `<div class="title">(.+?)</div>`, `<meta name="description" content="(.+?)"`, `<title>(.+?)</title>`,
)
if titles == nil || len(titles) < 2 {
return downloader.EmptyData(uri, extractors.ErrURLParseFailed)
}
title := titles[1]
var realURL string
videoURLs := utils.MatchOneOf(
html, `video:'(.+?)'`, `video:"(.+?)"`, `<video.*src="(.+?)"`,
)
if videoURLs == nil {
shareURLs := utils.MatchOneOf(
html, `<div class="video">\s*<script src="(.+?)"`,
)
if shareURLs == nil || len(shareURLs) < 2 {
return downloader.EmptyData(uri, extractors.ErrURLParseFailed)
}
shareURL := shareURLs[1]
signedVideo, err := request.Get(shareURL, uri, nil)
if err != nil {
return downloader.EmptyData(uri, err)
}
realURLs := utils.MatchOneOf(
signedVideo, `src=\\"(.+?)\\"`,
)
if realURLs == nil || len(realURLs) < 2 {
return downloader.EmptyData(uri, extractors.ErrURLParseFailed)
}
realURL = realURLs[1]
} else {
if len(videoURLs) < 2 {
return downloader.EmptyData(uri, extractors.ErrURLParseFailed)
}
realURL = videoURLs[1]
}
size, err := request.Size(realURL, uri)
if err != nil {
return downloader.EmptyData(uri, err)
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{
{
URL: realURL,
Size: size,
Ext: "mp4",
},
},
Size: size,
},
}
return downloader.Data{
Site: "糖豆广场舞 tangdou.com",
Title: title,
Type: "video",
Streams: streams,
URL: uri,
}
}

View File

@ -0,0 +1,61 @@
package tiktok
import (
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
// Extract is the main function for extracting data
func Extract(uri string) ([]downloader.Data, error) {
html, err := request.Get(uri, uri, nil)
if err != nil {
return nil, err
}
// There are a few json objects loaded into the html that are useful. We're able to parse the video url from the
// videoObject json.
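// The ld+json blob is read with a lightweight string lookup; only its
// "contentUrl" field is consumed, so no struct is declared for it.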
videoScriptTag := utils.MatchOneOf(html, `<script type="application\/ld\+json" id="videoObject">(.*?)<\/script>`)
if videoScriptTag == nil || len(videoScriptTag) < 2 {
return nil, extractors.ErrURLParseFailed
}
videoJSON := videoScriptTag[1]
videoURL := utils.GetStringFromJson(videoJSON, "contentUrl")
// We can receive the title directly from this __NEXT_DATA__ object.
nextScriptTag := utils.MatchOneOf(html, `<script id="__NEXT_DATA__" type="application\/json" crossorigin="anonymous">(.*?)<\/script>`)
if nextScriptTag == nil || len(nextScriptTag) < 2 {
return nil, extractors.ErrURLParseFailed
}
nextJSON := nextScriptTag[1]
title := utils.GetStringFromJson(nextJSON, "props.pageProps.videoData.itemInfos.text")
streams := map[string]downloader.Stream{}
size, err := request.Size(videoURL, uri)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: videoURL,
Size: size,
Ext: "mp4",
}
streams["default"] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
}
return []downloader.Data{
{
Site: "TikTok tiktok.com",
Title: title,
Type: "video",
Streams: streams,
URL: uri,
},
}, nil
}

View File

@ -0,0 +1,161 @@
package tumblr
import (
"encoding/json"
"errors"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type imageList struct {
List []string `json:"@list"`
}
type tumblrImageList struct {
Image imageList `json:"image"`
}
type tumblrImage struct {
Image string `json:"image"`
}
func genURLData(url, referer string) (downloader.URL, int64, error) {
size, err := request.Size(url, referer)
if err != nil {
return downloader.URL{}, 0, err
}
_, ext, err := utils.GetNameAndExt(url)
if err != nil {
return downloader.URL{}, 0, err
}
data := downloader.URL{
URL: url,
Size: size,
Ext: ext,
}
return data, size, nil
}
func tumblrImageDownload(url, html, title string) ([]downloader.Data, error) {
jsonStrings := utils.MatchOneOf(
html, `<script type="application/ld\+json">\s*(.+?)</script>`,
)
if jsonStrings == nil || len(jsonStrings) < 2 {
return nil, extractors.ErrURLParseFailed
}
jsonString := jsonStrings[1]
var totalSize int64
var urls []downloader.URL
if strings.Contains(jsonString, `"image":{"@list"`) {
// there are two data structures in the same field(image)
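// "image" is either a plain string URL (tumblrImage) or an object of the form
// {"@list": ["url1", "url2", ...]} (tumblrImageList); the substring check above
// decides which struct to unmarshal into.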
var imageList tumblrImageList
if err := json.Unmarshal([]byte(jsonString), &imageList); err != nil {
return nil, err
}
for _, u := range imageList.Image.List {
urlData, size, err := genURLData(u, url)
if err != nil {
return nil, err
}
totalSize += size
urls = append(urls, urlData)
}
} else {
var image tumblrImage
if err := json.Unmarshal([]byte(jsonString), &image); err != nil {
return nil, err
}
urlData, size, err := genURLData(image.Image, url)
if err != nil {
return nil, err
}
totalSize = size
urls = append(urls, urlData)
}
streams := map[string]downloader.Stream{
"default": {
URLs: urls,
Size: totalSize,
},
}
return []downloader.Data{
{
Site: "Tumblr tumblr.com",
Title: title,
Type: "image",
Streams: streams,
URL: url,
},
}, nil
}
func tumblrVideoDownload(url, html, title string) ([]downloader.Data, error) {
videoURLs := utils.MatchOneOf(html, `<iframe src='(.+?)'`)
if videoURLs == nil || len(videoURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
videoURL := videoURLs[1]
if !strings.Contains(videoURL, "tumblr.com/video") {
return nil, errors.New("annie doesn't support this URL right now")
}
videoHTML, err := request.Get(videoURL, url, nil)
if err != nil {
return nil, err
}
realURLs := utils.MatchOneOf(videoHTML, `source src="(.+?)"`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
realURL := realURLs[1]
urlData, size, err := genURLData(realURL, url)
if err != nil {
return nil, err
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
return []downloader.Data{
{
Site: "Tumblr tumblr.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
// get the title
doc, err := parser.GetDoc(html)
if err != nil {
return nil, err
}
title := parser.Title(doc)
if strings.Contains(html, "<iframe src=") {
// Video
return tumblrVideoDownload(url, html, title)
}
// Image
return tumblrImageDownload(url, html, title)
}

View File

@ -0,0 +1,134 @@
package twitter
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type twitter struct {
Track struct {
URL string `json:"playbackUrl"`
} `json:"track"`
TweetID string
Username string
}
// Extract is the main function for extracting data
func Extract(uri string) ([]downloader.Data, error) {
html, err := request.Get(uri, uri, nil)
if err != nil {
return nil, err
}
usernames := utils.MatchOneOf(html, `property="og:title"\s+content="(.+)"`)
if usernames == nil || len(usernames) < 2 {
return nil, extractors.ErrURLParseFailed
}
username := usernames[1]
tweetIDs := utils.MatchOneOf(uri, `(status|statuses)/(\d+)`)
if tweetIDs == nil || len(tweetIDs) < 3 {
return nil, extractors.ErrURLParseFailed
}
tweetID := tweetIDs[2]
api := fmt.Sprintf(
"https://api.twitter.com/1.1/videos/tweet/config/%s.json", tweetID,
)
headers := map[string]string{
"Authorization": "Bearer AAAAAAAAAAAAAAAAAAAAAIK1zgAAAAAA2tUWuhGZ2JceoId5GwYWU5GspY4%3DUq7gzFoCZs1QfwGoVdvSac3IniczZEYXIcDyumCauIXpcAPorE",
}
jsonString, err := request.Get(api, uri, headers)
if err != nil {
return nil, err
}
var twitterData twitter
if err := json.Unmarshal([]byte(jsonString), &twitterData); err != nil {
return nil, extractors.ErrURLParseFailed
}
twitterData.TweetID = tweetID
twitterData.Username = username
extractedData, err := download(twitterData, uri)
if err != nil {
return nil, err
}
return extractedData, nil
}
func download(data twitter, uri string) ([]downloader.Data, error) {
var (
err error
size int64
)
streams := make(map[string]downloader.Stream)
switch {
// if video file is m3u8 and ts
case strings.Contains(data.Track.URL, ".m3u8"):
m3u8urls, err := utils.M3u8URLs(data.Track.URL)
if err != nil {
return nil, err
}
for index, m3u8 := range m3u8urls {
var totalSize int64
var urls []downloader.URL
ts, err := utils.M3u8URLs(m3u8)
if err != nil {
return nil, err
}
for _, i := range ts {
size, err := request.Size(i, uri)
if err != nil {
return nil, err
}
temp := downloader.URL{
URL: i,
Size: size,
Ext: "ts",
}
totalSize += size
urls = append(urls, temp)
}
qualityMatches := utils.MatchOneOf(m3u8, `/(\d+x\d+)/`)
if qualityMatches == nil || len(qualityMatches) < 2 {
return nil, extractors.ErrURLParseFailed
}
qualityString := qualityMatches[1]
quality := strconv.Itoa(index + 1)
streams[quality] = downloader.Stream{
Quality: qualityString,
URLs: urls,
Size: totalSize,
}
}
// if video file is mp4
case strings.Contains(data.Track.URL, ".mp4"):
size, err = request.Size(data.Track.URL, uri)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: data.Track.URL,
Size: size,
Ext: "mp4",
}
streams["default"] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
}
}
return []downloader.Data{
{
Site: "Twitter twitter.com",
Title: fmt.Sprintf("%s %s", data.Username, data.TweetID),
Type: "video",
Streams: streams,
URL: uri,
},
}, nil
}

92
vendor/github.com/iawia002/annie/extractors/udn/udn.go generated vendored Normal file
View File

@ -0,0 +1,92 @@
package udn
import (
"errors"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
const (
startFlag = `',
mp4: '//`
endFlag = `'
},
subtitles`
)
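// startFlag and endFlag are literal snippets of the embed page's player script
// that bracket the protocol-less mp4 URL; getCDNUrl simply captures whatever
// sits between them.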
func getCDNUrl(html string) string {
if cdnURLs := utils.MatchOneOf(html, startFlag+"(.+?)"+endFlag); cdnURLs != nil && len(cdnURLs) > 1 && cdnURLs[1] != "" {
return cdnURLs[1]
}
return ""
}
func prepareEmbedURL(url string) string {
if !strings.Contains(url, "https://video.udn.com/embed/") {
newIDs := strings.Split(url, "/")
if len(newIDs) < 1 {
return ""
}
return "https://video.udn.com/embed/news/" + newIDs[len(newIDs)-1]
}
return url
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
url = prepareEmbedURL(url)
if len(url) == 0 {
return nil, extractors.ErrURLParseFailed
}
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
var title string
desc := utils.MatchOneOf(html, `title: '(.+?)',
link:`)
if desc != nil && len(desc) > 1 {
title = desc[1]
} else {
title = "udn"
}
cdnURL := getCDNUrl(html)
if cdnURL == "" {
return nil, errors.New("empty list")
}
srcURL, err := request.Get("http://"+cdnURL, url, nil)
if err != nil {
return nil, err
}
size, err := request.Size(srcURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: srcURL,
Size: size,
Ext: "mp4",
}
quality := "normal"
streams := map[string]downloader.Stream{
quality: {
URLs: []downloader.URL{urlData},
Size: size,
Quality: quality,
},
}
return []downloader.Data{
{
Site: "udn udn.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,48 @@
package universal
import (
"fmt"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
fmt.Println("\nannie doesn't support this URL right now, but it will try to download it directly")
filename, ext, err := utils.GetNameAndExt(url)
if err != nil {
return nil, err
}
size, err := request.Size(url, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: url,
Size: size,
Ext: ext,
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
contentType, err := request.ContentType(url, url)
if err != nil {
return nil, err
}
return []downloader.Data{
{
Site: "Universal",
Title: filename,
Type: contentType,
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,97 @@
package vimeo
import (
"encoding/json"
"strconv"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type vimeoProgressive struct {
Profile int `json:"profile"`
Width int `json:"width"`
Height int `json:"height"`
Quality string `json:"quality"`
URL string `json:"url"`
}
type vimeoFiles struct {
Progressive []vimeoProgressive `json:"progressive"`
}
type vimeoRequest struct {
Files vimeoFiles `json:"files"`
}
type vimeoVideo struct {
Title string `json:"title"`
}
type vimeo struct {
Request vimeoRequest `json:"request"`
Video vimeoVideo `json:"video"`
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
var (
html, vid string
err error
)
if strings.Contains(url, "player.vimeo.com") {
html, err = request.Get(url, url, nil)
if err != nil {
return nil, err
}
} else {
vids := utils.MatchOneOf(url, `vimeo\.com/(\d+)`)
if vids == nil || len(vids) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid = vids[1]
html, err = request.Get("https://player.vimeo.com/video/"+vid, url, nil)
if err != nil {
return nil, err
}
}
jsonStrings := utils.MatchOneOf(html, `var \w+\s?=\s?({.+?});`)
if jsonStrings == nil || len(jsonStrings) < 2 {
return nil, extractors.ErrURLParseFailed
}
jsonString := jsonStrings[1]
var vimeoData vimeo
if err = json.Unmarshal([]byte(jsonString), &vimeoData); err != nil {
return nil, err
}
streams := map[string]downloader.Stream{}
var size int64
var urlData downloader.URL
for _, video := range vimeoData.Request.Files.Progressive {
size, err = request.Size(video.URL, url)
if err != nil {
return nil, err
}
urlData = downloader.URL{
URL: video.URL,
Size: size,
Ext: "mp4",
}
streams[strconv.Itoa(video.Profile)] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
Quality: video.Quality,
}
}
return []downloader.Data{
{
Site: "Vimeo vimeo.com",
Title: vimeoData.Video.Title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,130 @@
package weibo
import (
"fmt"
netURL "net/url"
"strings"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/parser"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
func downloadWeiboTV(url string) ([]downloader.Data, error) {
headers := map[string]string{
"Cookie": "SUB=_2AkMsZ8xOf8NxqwJRmP4RzGLqbo5xyQDEieKaOz2VJRMxHRl-yj83qlEotRB6B-fiobWQ5vdEoYw7bCoCdf4KyP8O3Ujq",
}
html, err := request.Get(url, url, headers)
if err != nil {
return nil, err
}
doc, err := parser.GetDoc(html)
if err != nil {
return nil, err
}
title := strings.TrimSpace(
strings.Replace(doc.Find(".info_txt").First().Text(), "\u200B", " ", -1), // Zero width space.
)
// http://f.us.sinaimg.cn/003Cddn4lx07oCX1hC0001040200hkQk0k010.mp4?label=mp4_hd&template=852x480.20&Expires=1541041515&ssig=%2BYnCmZaToS&KID=unistore,video
// &480=http://f.us.sinaimg.cn/003Cddn4lx07oCX1hC0001040200hkQk0k010.mp4?label=mp4_hd&template=852x480.20&Expires=1541041515&ssig=%2BYnCmZaToS&KID=unistore,video
// &720=http://f.us.sinaimg.cn/004cqzndlx07oCX1kMOQ01040200vyxj0k010.mp4?label=mp4_720p&template=1280x720.20&Expires=1541041515&ssig=Fdasnr1aW6&KID=unistore,video&qType=720
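// The video-sources attribute packs several quality variants into one string,
// separated by "&480="/"&720=" markers; the loop below splits on those markers
// and keeps only entries that look like absolute http URLs.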
realURLs := utils.MatchOneOf(html, `video-sources="fluency=(.+?)"`)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
realURL, err := netURL.PathUnescape(realURLs[1])
if err != nil {
return nil, err
}
quality := []string{"480", "720"}
streams := make(map[string]downloader.Stream, len(quality))
for _, q := range quality {
urlList := strings.Split(realURL, fmt.Sprintf("&%s=", q))
u := urlList[len(urlList)-1]
if !strings.HasPrefix(u, "http") {
continue
}
size, err := request.Size(u, url)
if err != nil {
return nil, err
}
streams[q] = downloader.Stream{
URLs: []downloader.URL{
{
URL: u,
Size: size,
Ext: "mp4",
},
},
Size: size,
Quality: q,
}
}
return []downloader.Data{
{
Site: "微博 weibo.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
if !strings.Contains(url, "m.weibo.cn") {
if strings.Contains(url, "weibo.com/tv/v/") {
return downloadWeiboTV(url)
}
url = strings.Replace(url, "weibo.com", "m.weibo.cn", 1)
}
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
titles := utils.MatchOneOf(
html, `"content2": "(.+?)",`, `"status_title": "(.+?)",`,
)
if titles == nil || len(titles) < 2 {
return nil, extractors.ErrURLParseFailed
}
title := titles[1]
realURLs := utils.MatchOneOf(
html, `"stream_url_hd": "(.+?)"`, `"stream_url": "(.+?)"`,
)
if realURLs == nil || len(realURLs) < 2 {
return nil, extractors.ErrURLParseFailed
}
realURL := realURLs[1]
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: realURL,
Size: size,
Ext: "mp4",
}
streams := map[string]downloader.Stream{
"default": {
URLs: []downloader.URL{urlData},
Size: size,
},
}
return []downloader.Data{
{
Site: "微博 weibo.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,119 @@
package xvideos
import (
"fmt"
"strings"
"sync"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
const (
lowFlag = "html5player.setVideoUrlLow('"
lowFinalFlag = `');
html5player.setVideoUrlHigh(`
highFlag = "html5player.setVideoUrlHigh('"
highFinalFlag = `');
html5player.setVideoHLS(`
qualityLow = "low"
qualityHigh = "high"
)
var (
lowFlagLength = len(lowFlag)
highFlagLength = len(highFlag)
)
type src struct {
url string
quality string
}
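// getSrc locates the low/high mp4 URLs by finding the player's
// setVideoUrlLow/setVideoUrlHigh calls; the four strings.Index scans are run
// in separate goroutines and joined with a WaitGroup.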
func getSrc(html string) []*src {
var wg sync.WaitGroup
wg.Add(4)
startIndexLow := 0
go func() {
startIndexLow = strings.Index(html, lowFlag)
startIndexLow += lowFlagLength
wg.Done()
}()
endIndexLow := 0
go func() {
endIndexLow = strings.Index(html, lowFinalFlag)
wg.Done()
}()
startIndexHigh := 0
go func() {
startIndexHigh = strings.Index(html, highFlag)
startIndexHigh += highFlagLength
wg.Done()
}()
endIndexHigh := 0
go func() {
endIndexHigh = strings.Index(html, highFinalFlag)
wg.Done()
}()
wg.Wait()
var srcs []*src
if startIndexLow != -1 {
srcs = append(srcs, &src{
url: html[startIndexLow:endIndexLow],
quality: qualityLow,
})
}
if startIndexHigh != -1 {
srcs = append(srcs, &src{
url: html[startIndexHigh:endIndexHigh],
quality: qualityHigh,
})
}
return srcs
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
}
var title string
desc := utils.MatchOneOf(html, `<title>(.+?)</title>`)
if desc != nil && len(desc) > 1 {
title = desc[1]
} else {
title = "xvideos"
}
streams := make(map[string]downloader.Stream)
for _, src := range getSrc(html) {
size, err := request.Size(src.url, url)
if err != nil {
return nil, err
}
urlData := downloader.URL{
URL: src.url,
Size: size,
Ext: "mp4",
}
streams[src.quality] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: size,
Quality: fmt.Sprintf("%s", src.quality),
}
}
return []downloader.Data{
{
Site: "XVIDEOS xvideos.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,32 @@
package yinyuetai
type yinyuetaiMvData struct {
Error bool `json:"error"`
Message string `json:"message"`
VideoInfo videoInfo `json:"videoInfo"`
}
type videoInfo struct {
CoreVideoInfo coreVideoInfo `json:"coreVideoInfo"`
}
type coreVideoInfo struct {
ArtistNames string `json:"artistNames"`
Duration int `json:"duration"`
Error bool `json:"error"`
ErrorMsg string `json:"errorMsg"`
VideoID int `json:"videoID"`
VideoName string `json:"videoName"`
VideoURLModels []videoURLModel `json:"videoURLModels"`
}
type videoURLModel struct {
Bitrate int `json:"bitrate"`
BitrateType int `json:"bitrateType"`
FileSize int64 `json:"fileSize"`
MD5 string `json:"md5"`
SHA1 string `json:"sha1"`
QualityLevel string `json:"qualityLevel"`
QualityLevelName string `json:"qualityLevelName"`
VideoURL string `json:"videoURL"`
}

View File

@ -0,0 +1,79 @@
package yinyuetai
import (
"encoding/json"
"errors"
"fmt"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
const yinyuetaiAPI = "https://ext.yinyuetai.com/main/"
const (
actionGetMvInfo = "get-h-mv-info"
)
func genAPI(action string, param string) string {
return fmt.Sprintf("%s%s?json=true&%s", yinyuetaiAPI, action, param)
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
vid := utils.MatchOneOf(
url,
`https?://v.yinyuetai.com/video/(\d+)(?:\?vid=\d+)?`,
`https?://v.yinyuetai.com/video/h5/(\d+)(?:\?vid=\d+)?`,
`https?://m2.yinyuetai.com/video.html\?id=(\d+)`,
)
if vid == nil || len(vid) < 2 {
return nil, errors.New("invalid url for yinyuetai")
}
params := fmt.Sprintf("videoId=%s", vid[1])
// generate api url
apiUrl := genAPI(actionGetMvInfo, params)
var err error
html, err := request.Get(apiUrl, url, nil)
if err != nil {
return nil, err
}
// parse yinyuetai data
data := yinyuetaiMvData{}
if err = json.Unmarshal([]byte(html), &data); err != nil {
return nil, extractors.ErrURLParseFailed
}
// handle api error
if data.Error {
return nil, errors.New(data.Message)
}
if data.VideoInfo.CoreVideoInfo.Error {
return nil, errors.New(data.VideoInfo.CoreVideoInfo.ErrorMsg)
}
title := data.VideoInfo.CoreVideoInfo.VideoName
streams := map[string]downloader.Stream{}
// set streams
for _, model := range data.VideoInfo.CoreVideoInfo.VideoURLModels {
urlData := downloader.URL{
URL: model.VideoURL,
Size: model.FileSize,
Ext: "mp4",
}
streams[model.QualityLevel] = downloader.Stream{
URLs: []downloader.URL{urlData},
Size: model.FileSize,
Quality: model.QualityLevelName,
}
}
return []downloader.Data{
{
Site: "音悦台 yinyuetai.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}

View File

@ -0,0 +1,245 @@
package youku
import (
"bytes"
"crypto/hmac"
"crypto/sha1"
"encoding/base64"
"encoding/binary"
"encoding/json"
"errors"
"fmt"
"math/rand"
netURL "net/url"
"strings"
"time"
"github.com/iawia002/annie/config"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type errorData struct {
Note string `json:"note"`
Code int `json:"code"`
}
type segs struct {
Size int64 `json:"size"`
URL string `json:"cdn_url"`
}
type stream struct {
Size int64 `json:"size"`
Width int `json:"width"`
Height int `json:"height"`
Segs []segs `json:"segs"`
Type string `json:"stream_type"`
AudioLang string `json:"audio_lang"`
}
type youkuVideo struct {
Title string `json:"title"`
}
type youkuShow struct {
Title string `json:"title"`
}
type data struct {
Error errorData `json:"error"`
Stream []stream `json:"stream"`
Video youkuVideo `json:"video"`
Show youkuShow `json:"show"`
}
type youkuData struct {
Data data `json:"data"`
}
const youkuReferer = "https://v.youku.com"
func getAudioLang(lang string) string {
var youkuAudioLang = map[string]string{
"guoyu": "国语",
"ja": "日语",
"yue": "粤语",
}
translate, ok := youkuAudioLang[lang]
if !ok {
return lang
}
return translate
}
// https://g.alicdn.com/player/ykplayer/0.5.61/youku-player.min.js
// {"0505":"interior","050F":"interior","0501":"interior","0502":"interior","0503":"interior","0510":"adshow","0512":"BDskin","0590":"BDskin"}
// var ccodes = []string{"0510", "0502", "0507", "0508", "0512", "0513", "0514", "0503", "0590"}
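// As written, only the user-configured config.YoukuCcode is tried below; the
// slice-based loop merely leaves room for retrying with several ccodes.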
func youkuUps(vid string) (*youkuData, error) {
var (
url string
utid string
utids []string
data youkuData
)
if strings.Contains(config.Cookie, "cna") {
utids = utils.MatchOneOf(config.Cookie, `cna=(.+?);`, `cna\s+(.+?)\s`, `cna\s+(.+?)$`)
} else {
headers, err := request.Headers("http://log.mmstat.com/eg.js", youkuReferer)
if err != nil {
return nil, err
}
setCookie := headers.Get("Set-Cookie")
utids = utils.MatchOneOf(setCookie, `cna=(.+?);`)
}
if utids == nil || len(utids) < 2 {
return nil, extractors.ErrURLParseFailed
}
utid = utids[1]
// https://g.alicdn.com/player/ykplayer/0.5.61/youku-player.min.js
// grep -oE '"[0-9a-zA-Z+/=]{256}"' youku-player.min.js
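// The 256-character base64 string found with that grep is presumably the value
// expected in config.YoukuCkey, which is sent as the ckey parameter below.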
for _, ccode := range []string{config.YoukuCcode} {
if ccode == "0103010102" {
utid = generateUtdid()
}
url = fmt.Sprintf(
"https://ups.youku.com/ups/get.json?vid=%s&ccode=%s&client_ip=192.168.1.1&client_ts=%d&utid=%s&ckey=%s",
vid, ccode, time.Now().Unix()/1000, netURL.QueryEscape(utid), netURL.QueryEscape(config.YoukuCkey),
)
if config.YoukuPassword != "" {
url = fmt.Sprintf("%s&password=%s", url, config.YoukuPassword)
}
html, err := request.GetByte(url, youkuReferer, nil)
if err != nil {
return nil, err
}
// data must be emptied before reassignment, otherwise it will contain the previous value (the 'error' data)
data = youkuData{}
if err = json.Unmarshal(html, &data); err != nil {
return nil, err
}
if data.Data.Error == (errorData{}) {
return &data, nil
}
}
return &data, nil
}
func getBytes(val int32) []byte {
var buff bytes.Buffer
binary.Write(&buff, binary.BigEndian, val)
return buff.Bytes()
}
func hashCode(s string) int32 {
var result int32
for _, c := range s {
result = result*0x1f + int32(c)
}
return result
}
func hmacSha1(key []byte, msg []byte) []byte {
mac := hmac.New(sha1.New, key)
mac.Write(msg)
return mac.Sum(nil)
}
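// generateUtdid fabricates a device id in the layout the Youku mobile client
// appears to use: a big-endian timestamp (shifted back eight hours), a random
// int32, the flag bytes 0x03 0x00, a 4-byte hash of a fake IMEI, and a 4-byte
// hash derived from an HMAC-SHA1 of everything so far, all base64 encoded.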
func generateUtdid() string {
timestamp := int32(time.Now().Unix())
var buffer bytes.Buffer
buffer.Write(getBytes(timestamp - 60*60*8))
buffer.Write(getBytes(rand.Int31()))
buffer.WriteByte(0x03)
buffer.WriteByte(0x00)
imei := fmt.Sprintf("%d", rand.Int31())
buffer.Write(getBytes(hashCode(imei)))
data := hmacSha1([]byte("d6fc3a4a06adbde89223bvefedc24fecde188aaa9161"), buffer.Bytes())
buffer.Write(getBytes(hashCode(base64.StdEncoding.EncodeToString(data))))
return base64.StdEncoding.EncodeToString(buffer.Bytes())
}
func genData(youkuData data) map[string]downloader.Stream {
var (
streamString string
quality string
)
streams := map[string]downloader.Stream{}
for _, stream := range youkuData.Stream {
if stream.AudioLang == "default" {
streamString = stream.Type
quality = fmt.Sprintf(
"%s %dx%d", stream.Type, stream.Width, stream.Height,
)
} else {
streamString = fmt.Sprintf("%s-%s", stream.Type, stream.AudioLang)
quality = fmt.Sprintf(
"%s %dx%d %s", stream.Type, stream.Width, stream.Height,
getAudioLang(stream.AudioLang),
)
}
ext := strings.Split(
strings.Split(stream.Segs[0].URL, "?")[0],
".",
)
urls := make([]downloader.URL, len(stream.Segs))
for index, data := range stream.Segs {
urls[index] = downloader.URL{
URL: data.URL,
Size: data.Size,
Ext: ext[len(ext)-1],
}
}
streams[streamString] = downloader.Stream{
URLs: urls,
Size: stream.Size,
Quality: quality,
}
}
return streams
}
// Extract is the main function for extracting data
func Extract(url string) ([]downloader.Data, error) {
vids := utils.MatchOneOf(
url, `id_(.+?)\.html`, `id_(.+)`,
)
if vids == nil || len(vids) < 2 {
return nil, extractors.ErrURLParseFailed
}
vid := vids[1]
youkuData, err := youkuUps(vid)
if err != nil {
return nil, err
}
if youkuData.Data.Error.Code != 0 {
return nil, errors.New(youkuData.Data.Error.Note)
}
streams := genData(youkuData.Data)
var title string
if youkuData.Data.Show.Title == "" || strings.Contains(
youkuData.Data.Video.Title, youkuData.Data.Show.Title,
) {
title = youkuData.Data.Video.Title
} else {
title = fmt.Sprintf("%s %s", youkuData.Data.Show.Title, youkuData.Data.Video.Title)
}
return []downloader.Data{
{
Site: "优酷 youku.com",
Title: title,
Type: "video",
Streams: streams,
URL: url,
},
}, nil
}


@ -0,0 +1,246 @@
package youtube
import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/rylio/ytdl"
"github.com/iawia002/annie/config"
"github.com/iawia002/annie/downloader"
"github.com/iawia002/annie/extractors"
"github.com/iawia002/annie/request"
"github.com/iawia002/annie/utils"
)
type streamFormat struct {
Itag int `json:"itag"`
URL string `json:"url"`
MimeType string `json:"mimeType"`
ContentLength string `json:"contentLength"`
QualityLabel string `json:"qualityLabel"`
AudioQuality string `json:"audioQuality"`
}
type playerResponseType struct {
StreamingData struct {
Formats []streamFormat `json:"formats"`
AdaptiveFormats []streamFormat `json:"adaptiveFormats"`
} `json:"streamingData"`
VideoDetails struct {
Title string `json:"title"`
} `json:"videoDetails"`
}
type youtubeData struct {
Args struct {
PlayerResponse string `json:"player_response"`
} `json:"args"`
}
const referer = "https://www.youtube.com"
// Extract is the main function for extracting data
func Extract(uri string) ([]downloader.Data, error) {
var err error
if !config.Playlist {
return []downloader.Data{youtubeDownload(uri)}, nil
}
listIDs := utils.MatchOneOf(uri, `(list|p)=([^/&]+)`)
if listIDs == nil || len(listIDs) < 3 {
return nil, extractors.ErrURLParseFailed
}
listID := listIDs[2]
if len(listID) == 0 {
return nil, errors.New("can't get list ID from URL")
}
html, err := request.Get("https://www.youtube.com/playlist?list="+listID, referer, nil)
if err != nil {
return nil, err
}
// "videoId":"OQxX8zgyzuM","thumbnail"
videoIDs := utils.MatchAll(html, `"videoId":"([^,]+?)","thumbnail"`)
needDownloadItems := utils.NeedDownloadList(len(videoIDs))
extractedData := make([]downloader.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(config.ThreadNumber)
dataIndex := 0
for index, videoID := range videoIDs {
if !utils.ItemInSlice(index+1, needDownloadItems) || len(videoID) < 2 {
continue
}
u := fmt.Sprintf(
"https://www.youtube.com/watch?v=%s&list=%s", videoID[1], listID,
)
wgp.Add()
go func(index int, u string, extractedData []downloader.Data) {
defer wgp.Done()
extractedData[index] = youtubeDownload(u)
}(dataIndex, u, extractedData)
dataIndex++
}
wgp.Wait()
return extractedData, nil
}
// youtubeDownload is the download function for a single URL
func youtubeDownload(uri string) downloader.Data {
vid := utils.MatchOneOf(
uri,
`watch\?v=([^/&]+)`,
`youtu\.be/([^?/]+)`,
`embed/([^/?]+)`,
`v/([^/?]+)`,
)
if vid == nil || len(vid) < 2 {
return downloader.EmptyData(uri, errors.New("can't find vid"))
}
videoURL := fmt.Sprintf(
"https://www.youtube.com/watch?v=%s",
vid[1],
)
videoInfo, err := ytdl.GetVideoInfo(uri)
if err != nil {
return downloader.EmptyData(uri, err)
}
html, err := request.Get(videoURL, referer, nil)
if err != nil {
return downloader.EmptyData(uri, err)
}
ytplayer := utils.MatchOneOf(html, `;ytplayer\.config\s*=\s*({.+?});`)
if ytplayer == nil || len(ytplayer) < 2 {
if strings.Contains(html, "LOGIN_REQUIRED") ||
strings.Contains(html, "Sign in to confirm your age") {
return downloader.EmptyData(uri, extractors.ErrLoginRequired)
}
return downloader.EmptyData(uri, extractors.ErrURLParseFailed)
}
var data youtubeData
if err = json.Unmarshal([]byte(ytplayer[1]), &data); err != nil {
return downloader.EmptyData(uri, err)
}
var playerResponse playerResponseType
if err = json.Unmarshal([]byte(data.Args.PlayerResponse), &playerResponse); err != nil {
return downloader.EmptyData(uri, err)
}
title := playerResponse.VideoDetails.Title
streams, err := extractVideoURLS(playerResponse, videoInfo)
if err != nil {
return downloader.EmptyData(uri, err)
}
return downloader.Data{
Site: "YouTube youtube.com",
Title: title,
Type: "video",
Streams: streams,
URL: uri,
}
}
func getStreamExt(streamType string) string {
// video/webm; codecs="vp8.0, vorbis" --> webm
exts := utils.MatchOneOf(streamType, `(\w+)/(\w+);`)
if exts == nil || len(exts) < 3 {
return ""
}
return exts[2]
}
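// getRealURL matches the format's itag against the ytdl format list and asks
// ytdl to resolve the real download URL, returning it together with the size
// reported in the player response and the given extension.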
func getRealURL(videoFormat streamFormat, videoInfo *ytdl.VideoInfo, ext string) (*downloader.URL, error) {
	// ytdlFormat stays nil unless a format with a matching itag is found
	var ytdlFormat *ytdl.Format
for _, f := range videoInfo.Formats {
if f.Itag.Number == videoFormat.Itag {
ytdlFormat = f
break
}
}
if ytdlFormat == nil {
return nil, fmt.Errorf("unable to get info for itag %d", videoFormat.Itag)
}
realURL, err := videoInfo.GetDownloadURL(ytdlFormat)
if err != nil {
return nil, err
}
size, _ := strconv.ParseInt(videoFormat.ContentLength, 10, 64)
return &downloader.URL{
URL: realURL.String(),
Size: size,
Ext: ext,
}, nil
}
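// genStream builds a single-URL downloader.Stream for one format, deriving
// the file extension from the MIME type and labelling the stream with its
// quality label (when present) and MIME type.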
func genStream(videoFormat streamFormat, videoInfo *ytdl.VideoInfo) (*downloader.Stream, error) {
streamType := videoFormat.MimeType
ext := getStreamExt(streamType)
if ext == "" {
return nil, fmt.Errorf("unable to get file extension of MimeType %s", streamType)
}
video, err := getRealURL(videoFormat, videoInfo, ext)
if err != nil {
return nil, err
}
var quality string
if videoFormat.QualityLabel != "" {
quality = fmt.Sprintf("%s %s", videoFormat.QualityLabel, streamType)
} else {
quality = streamType
}
return &downloader.Stream{
URLs: []downloader.URL{*video},
Quality: quality,
}, nil
}
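// extractVideoURLS collects streams for both the muxed Formats and the
// AdaptiveFormats, keyed by itag; since adaptive formats are video-only or
// audio-only, the first audio/mp4 track found is appended to every adaptive
// stream so the downloader can merge video and audio afterwards.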
func extractVideoURLS(data playerResponseType, videoInfo *ytdl.VideoInfo) (map[string]downloader.Stream, error) {
streams := make(map[string]downloader.Stream, len(data.StreamingData.Formats)+len(data.StreamingData.AdaptiveFormats))
for _, f := range data.StreamingData.Formats {
stream, err := genStream(f, videoInfo)
if err != nil {
return nil, err
}
streams[strconv.Itoa(f.Itag)] = *stream
}
	// Unlike `url_encoded_fmt_stream_map`, the videos in `adaptive_fmts` have no sound,
	// so we need to download both the video and the audio and then merge them.
	// First, get an audio track for the videos in AdaptiveFormats.
var audio downloader.URL
for _, f := range data.StreamingData.AdaptiveFormats {
if strings.HasPrefix(f.MimeType, "audio/mp4") {
audioURL, err := getRealURL(f, videoInfo, "m4a")
if err != nil {
return nil, err
}
audio = *audioURL
break
}
}
for _, f := range data.StreamingData.AdaptiveFormats {
stream, err := genStream(f, videoInfo)
if err != nil {
return nil, err
}
stream.URLs = append(stream.URLs, audio)
streams[strconv.Itoa(f.Itag)] = *stream
}
return streams, nil
}

vendor/github.com/iawia002/annie/go.mod generated vendored Normal file

@ -0,0 +1,18 @@
module github.com/iawia002/annie
go 1.12
require (
github.com/MercuryEngineering/CookieMonster v0.0.0-20180304172713-1584578b3403
github.com/PuerkitoBio/goquery v1.4.1
github.com/andybalholm/cascadia v1.0.0 // indirect
github.com/cheggaaa/pb v1.0.25
github.com/fatih/color v1.7.0
github.com/kr/pretty v0.1.0
github.com/mattn/go-colorable v0.0.9 // indirect
github.com/rs/zerolog v1.16.0
github.com/rylio/ytdl v0.6.2
github.com/tidwall/gjson v1.3.2
golang.org/x/net v0.0.0-20190620200207-3b0461eec859
gopkg.in/cheggaaa/pb.v1 v1.0.28 // indirect
)

vendor/github.com/iawia002/annie/go.sum generated vendored Normal file

@ -0,0 +1,74 @@
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/MercuryEngineering/CookieMonster v0.0.0-20180304172713-1584578b3403 h1:EtZwYyLbkEcIt+B//6sujwRCnHuTEK3qiSypAX5aJeM=
github.com/MercuryEngineering/CookieMonster v0.0.0-20180304172713-1584578b3403/go.mod h1:mM6WvakkX2m+NgMiPCfFFjwfH4KzENC07zeGEqq9U7s=
github.com/PuerkitoBio/goquery v1.4.1 h1:smcIRGdYm/w7JSbcdeLHEMzxmsBQvl8lhf0dSw2nzMI=
github.com/PuerkitoBio/goquery v1.4.1/go.mod h1:T9ezsOHcCrDCgA8aF1Cqr3sSYbO/xgdy8/R/XiIMAhA=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/cheggaaa/pb v1.0.25 h1:tFpebHTkI7QZx1q1rWGOKhbunhZ3fMaxTvHDWn1bH/4=
github.com/cheggaaa/pb v1.0.25/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.3 h1:ns/ykhmWi7G9O+8a448SecJU3nSMBXJfqQkl0upE1jI=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10=
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
github.com/mattn/go-runewidth v0.0.2 h1:UnlwIPBGaTZfPQ6T1IGzPI0EkYAQmT9fAEJ/poFC63o=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.6 h1:V2iyH+aX9C5fsYCpK60U8BYIvmhqxuOL3JZcqc1NB7k=
github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.16.0 h1:AaELmZdcJHT8m6oZ5py4213cdFK8XGXkB3dFdAQ+P7Q=
github.com/rs/zerolog v1.16.0/go.mod h1:9nvC1axdVrAHcu/s9taAVfBuIdTZLVQmKQyvrUjF5+I=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/rylio/ytdl v0.6.2 h1:ZYzaoUqAniH/yb7yscdWlVoecVRcvpFDgS15NyDTZQA=
github.com/rylio/ytdl v0.6.2/go.mod h1:F0WX8szfQ00mhmfla+0xVJp483SBV4VO/ByUaNioNSM=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/tidwall/gjson v1.3.2 h1:+7p3qQFaH3fOMXAJSrdZwGKcOO/lYdGS0HqGhPqDdTI=
github.com/tidwall/gjson v1.3.2/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls=
github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc=
github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180621144259-afe8f62b1d6b h1:VgWwPFFrh6LyvObbug7TZtI8EX5zwx7WsS6Y4Bqezqo=
golang.org/x/net v0.0.0-20180621144259-afe8f62b1d6b/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180622082034-63fc586f45fe h1:ay7inWg28/GEO1erz2KR0ywSgsw4yPHUw1egz2vGcN0=
golang.org/x/sys v0.0.0-20180622082034-63fc586f45fe/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191104094858-e8c54fb511f6 h1:ZJUmhYTp8GbGC0ViZRc2U+MIYQ8xx9MscsdXnclfIhw=
golang.org/x/sys v0.0.0-20191104094858-e8c54fb511f6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.28 h1:n1tBJnnK2r7g9OW2btFH91V92STTUevLXYFb8gy9EMk=
gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

Some files were not shown because too many files have changed in this diff.