flyscrape

package module
v0.2.0
Published: Oct 12, 2023 License: MPL-2.0 Imports: 23 Imported by: 0

README



flyscrape is an expressive and elegant web scraper that combines the speed of Go with the
flexibility of JavaScript, letting you focus on data extraction rather than request juggling.


Features

  • Domains and URL filtering
  • Depth control
  • Request caching
  • Rate limiting
  • Development mode
  • Single binary executable

Example script

export const config = {
    url: "https://news.ycombinator.com/",
}

export default function ({ doc, absoluteURL }) {
    const title = doc.find("title");
    const posts = doc.find(".athing");

    return {
        title: title.text(),
        posts: posts.map((post) => {
            const link = post.find(".titleline > a");

            return {
                title: link.text(),
                url: link.attr("href"),
            };
        }),
    }
}
$ flyscrape run hackernews.js
[
  {
    "title": "Hacker News",
    "url": "https://news.ycombinator.com/",
    "data": {
      "posts": [
        {
          "title": "Show HN: flyscrape - An expressive and elegant web scraper",
          "url": "https://flyscrape.com"
        },
        ...
      ]
    }
  }
]

Installation

To install flyscrape, follow these simple steps:

  1. Install Go: Make sure you have Go installed on your system. If not, you can download it from https://golang.org/.

  2. Install flyscrape: Open a terminal and run the following command:

    go install github.com/philippta/flyscrape/cmd/flyscrape@latest
    

Usage

$ flyscrape
flyscrape is an elegant scraping tool for efficiently extracting data from websites.

Usage:

    flyscrape <command> [arguments]

Commands:

    new    creates a sample scraping script
    run    runs a scraping script
    dev    watches and re-runs a scraping script

Create a new sample scraping script

The new command creates a boilerplate sample script that helps you get started.

flyscrape new example.js

Watch the script for changes during development

The dev command allows you to watch your scraping script for changes and quickly iterate during development. In development mode, flyscrape will not follow any links and request caching is enabled.

flyscrape dev example.js

Run the scraping script

The run command allows you to run your script to its fullest extent.

flyscrape run example.js

Configuration

Below is an example scraping script that showcases the capabilities of flyscrape:

export const config = {
    url: "https://example.com/", // Specify the URL to start scraping from.
    depth: 0,                    // Specify how deep links should be followed.  (default = 0, no follow)
    allowedDomains: [],          // Specify the allowed domains. ['*'] for all. (default = domain from url)
    blockedDomains: [],          // Specify the blocked domains.                (default = none)
    allowedURLs: [],             // Specify the allowed URLs as regex.          (default = all allowed)
    blockedURLs: [],             // Specify the blocked URLs as regex.          (default = none)
    rate: 100,                   // Specify the rate in requests per second.    (default = no rate limit)
    cache: "file",               // Enable file-based request caching.          (default = no cache)
};

export default function ({ doc, url, absoluteURL }) {
    // doc              - Contains the parsed HTML document
    // url              - Contains the scraped URL
    // absoluteURL(...) - Transforms relative URLs into absolute URLs
}

Query API

// <div class="element" foo="bar">Hey</div>
const el = doc.find(".element")
el.text()                                 // "Hey"
el.html()                                 // `<div class="element">Hey</div>`
el.attr("foo")                            // "bar"
el.hasAttr("foo")                         // true
el.hasClass("element")                    // true

// <ul>
//   <li class="a">Item 1</li>
//   <li>Item 2</li>
//   <li>Item 3</li>
// </ul>
const list = doc.find("ul")
list.children()                           // [<li class="a">Item 1</li>, <li>Item 2</li>, <li>Item 3</li>]

const items = list.find("li")
items.length()                            // 3
items.first()                             // <li>Item 1</li>
items.last()                              // <li>Item 3</li>
items.get(1)                              // <li>Item 2</li>
items.get(1).prev()                       // <li>Item 1</li>
items.get(1).next()                       // <li>Item 3</li>
items.get(1).parent()                     // <ul>...</ul>
items.get(1).siblings()                   // [<li class="a">Item 1</li>, <li>Item 2</li>, <li>Item 3</li>]
items.map(item => item.text())            // ["Item 1", "Item 2", "Item 3"]
items.filter(item => item.hasClass("a"))  // [<li class="a">Item 1</li>]

Contributing

We welcome contributions from the community! If you encounter any issues or have suggestions for improvement, please submit an issue.

Documentation

Constants

This section is empty.

Variables

var ScriptTemplate []byte

var StopWatch = errors.New("stop watch")

Functions

func Compile

func Compile(src string) (Config, ScrapeFunc, error)
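
A minimal sketch of calling Compile from Go, assuming the import path github.com/philippta/flyscrape and that the returned ScrapeFunc can be invoked directly against static HTML:

package main

import (
    "fmt"
    "log"

    "github.com/philippta/flyscrape"
)

func main() {
    src := `
export const config = { url: "https://example.com/" }
export default function ({ doc }) {
    return { title: doc.find("title").text() }
}`

    // Compile the JavaScript source into a Config and a ScrapeFunc.
    cfg, scrape, err := flyscrape.Compile(src)
    if err != nil {
        log.Fatal(err)
    }
    _ = cfg // Config is a []byte holding the compiled script's config; unused here.

    // Invoke the compiled scrape function against static HTML
    // (assumption: ScrapeParams.HTML holds the page source, URL its address).
    data, err := scrape(flyscrape.ScrapeParams{
        HTML: "<html><head><title>Example</title></head></html>",
        URL:  "https://example.com/",
    })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(data)
}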

func LoadModules added in v0.2.0

func LoadModules(s *Scraper, cfg Config)

func MockResponse added in v0.2.0

func MockResponse(statusCode int, html string) (*http.Response, error)

func Prettify added in v0.2.0

func Prettify(v any, prefix string) string

func RegisterModule added in v0.2.0

func RegisterModule(mod Module)

func Watch

func Watch(path string, fn func(string) error) error
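
A hedged sketch of Watch together with the StopWatch sentinel; the signature alone does not say whether the callback receives the file path or its contents, so that detail is an assumption:

package main

import (
    "errors"
    "log"

    "github.com/philippta/flyscrape"
)

func main() {
    // Watch example.js and stop after the first change by returning the
    // StopWatch sentinel (an assumption about its intended semantics).
    err := flyscrape.Watch("example.js", func(s string) error {
        // s is the watched file's path or contents; the signature
        // does not say which (assumption).
        log.Println("example.js changed")
        return flyscrape.StopWatch
    })
    if err != nil && !errors.Is(err, flyscrape.StopWatch) {
        log.Fatal(err)
    }
}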

Types

type Config added in v0.2.0

type Config []byte

type Context added in v0.2.0

type Context interface {
	ScriptName() string
	Visit(url string)
	MarkVisited(url string)
	MarkUnvisited(url string)
	DisableModule(id string)
}

type FetchFunc

type FetchFunc func(url string) (string, error)

type Finalizer added in v0.2.0

type Finalizer interface {
	Finalize()
}

type Module added in v0.2.0

type Module interface {
	ModuleInfo() ModuleInfo
}

type ModuleInfo added in v0.2.0

type ModuleInfo struct {
	ID  string
	New func() Module
}
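
To illustrate how Module and ModuleInfo fit together, here is a hypothetical module that registers itself and logs every response via the ResponseReceiver hook described further below; the module name, ID and behavior are illustrative, not part of the package:

package logmodule

import (
    "log"

    "github.com/philippta/flyscrape"
)

func init() {
    // Make the module available to the scraper via the global registry.
    flyscrape.RegisterModule(LogModule{})
}

// LogModule is a hypothetical module that logs every received response.
type LogModule struct{}

func (LogModule) ModuleInfo() flyscrape.ModuleInfo {
    return flyscrape.ModuleInfo{
        ID:  "log",                                           // hypothetical module ID
        New: func() flyscrape.Module { return &LogModule{} }, // assumed to create a fresh instance
    }
}

// ReceiveResponse implements the ResponseReceiver hook (see below).
func (LogModule) ReceiveResponse(res *flyscrape.Response) {
    log.Printf("%s -> %d", res.Request.URL, res.StatusCode)
}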

type Provisioner added in v0.2.0

type Provisioner interface {
	Provision(Context)
}

type Request added in v0.2.0

type Request struct {
	Method  string
	URL     string
	Headers http.Header
	Cookies http.CookieJar
	Depth   int
}

type RequestBuilder added in v0.2.0

type RequestBuilder interface {
	BuildRequest(*Request)
}

type RequestValidator added in v0.2.0

type RequestValidator interface {
	ValidateRequest(*Request) bool
}
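
As a sketch of this hook, a hypothetical type that limits crawl depth; it assumes that returning false causes the request to be skipped, and a real module would also implement ModuleInfo:

package depthlimit

import "github.com/philippta/flyscrape"

// DepthLimiter is a hypothetical type; only the RequestValidator hook is shown.
type DepthLimiter struct{}

func (DepthLimiter) ValidateRequest(req *flyscrape.Request) bool {
    // Returning false presumably causes the request to be skipped (assumption).
    return req.Depth <= 2
}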

type Response added in v0.2.0

type Response struct {
	StatusCode int
	Headers    http.Header
	Body       []byte
	Data       any
	Error      error
	Request    *Request

	Visit func(url string)
}

type ResponseReceiver added in v0.2.0

type ResponseReceiver interface {
	ReceiveResponse(*Response)
}

type RoundTripFunc added in v0.2.0

type RoundTripFunc func(*http.Request) (*http.Response, error)

func MockTransport added in v0.2.0

func MockTransport(statusCode int, html string) RoundTripFunc

func (RoundTripFunc) RoundTrip added in v0.2.0

func (f RoundTripFunc) RoundTrip(r *http.Request) (*http.Response, error)
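
Because RoundTripFunc has a RoundTrip method, it satisfies http.RoundTripper, so MockTransport can plug straight into an http.Client. A sketch of how that might look in a test:

package flyscrape_test

import (
    "net/http"
    "testing"

    "github.com/philippta/flyscrape"
)

func TestMockTransport(t *testing.T) {
    // Every request through this client returns the canned response.
    client := &http.Client{
        Transport: flyscrape.MockTransport(200, "<html><title>Mock</title></html>"),
    }

    res, err := client.Get("https://example.com/")
    if err != nil {
        t.Fatal(err)
    }
    defer res.Body.Close()

    if res.StatusCode != 200 {
        t.Fatalf("want status 200, got %d", res.StatusCode)
    }
}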

type ScrapeFunc

type ScrapeFunc func(ScrapeParams) (any, error)

type ScrapeParams

type ScrapeParams struct {
	HTML string
	URL  string
}

type Scraper

type Scraper struct {
	ScrapeFunc ScrapeFunc
	Script     string
	// contains filtered or unexported fields
}

func NewScraper added in v0.2.0

func NewScraper() *Scraper

func (*Scraper) DisableModule added in v0.2.0

func (s *Scraper) DisableModule(id string)

func (*Scraper) LoadModule added in v0.2.0

func (s *Scraper) LoadModule(mod Module)

func (*Scraper) MarkUnvisited added in v0.2.0

func (s *Scraper) MarkUnvisited(url string)

func (*Scraper) MarkVisited added in v0.2.0

func (s *Scraper) MarkVisited(url string)

func (*Scraper) Run added in v0.2.0

func (s *Scraper) Run()

func (*Scraper) ScriptName added in v0.2.0

func (s *Scraper) ScriptName() string

func (*Scraper) Visit added in v0.2.0

func (s *Scraper) Visit(url string)
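
Putting the pieces together, a hedged sketch of driving a Scraper from Go; that the Script field carries the script name and that LoadModules wires up the modules described by the compiled config are assumptions based on the signatures above:

package main

import (
    "log"
    "os"

    "github.com/philippta/flyscrape"
)

func main() {
    src, err := os.ReadFile("hackernews.js")
    if err != nil {
        log.Fatal(err)
    }

    cfg, scrape, err := flyscrape.Compile(string(src))
    if err != nil {
        log.Fatal(err)
    }

    s := flyscrape.NewScraper()
    s.Script = "hackernews.js" // assumption: Script carries the script name
    s.ScrapeFunc = scrape

    // Load the modules configured by the script (assumption about the
    // role of LoadModules, based on its signature).
    flyscrape.LoadModules(s, cfg)

    s.Run()
}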

type TransformError

type TransformError struct {
	Line   int
	Column int
	Text   string
}

func (TransformError) Error

func (err TransformError) Error() string

type TransportAdapter added in v0.2.0

type TransportAdapter interface {
	AdaptTransport(http.RoundTripper) http.RoundTripper
}
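
A sketch of the TransportAdapter hook for a hypothetical module that injects a User-Agent header, reusing RoundTripFunc from above; only the hook is shown:

package headermod

import (
    "net/http"

    "github.com/philippta/flyscrape"
)

// headerModule is hypothetical; a full module would also implement ModuleInfo.
type headerModule struct{}

func (headerModule) AdaptTransport(next http.RoundTripper) http.RoundTripper {
    return flyscrape.RoundTripFunc(func(r *http.Request) (*http.Response, error) {
        // Clone before modifying, per the http.RoundTripper contract.
        r = r.Clone(r.Context())
        r.Header.Set("User-Agent", "flyscrape-example") // illustrative header value
        return next.RoundTrip(r)
    })
}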

Directories

Path                  Synopsis
cmd
    flyscrape         command
modules
