crawler

package
v0.0.0-...-974d401 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 8, 2017 License: GPL-3.0 Imports: 12 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Dice

type Dice struct {
	JobWriter chan structures.JobDetail
	Storage   service.Storage
	Skills    []string
	Url       string
	// contains filtered or unexported fields
}

func NewDiceCrawler

func NewDiceCrawler(
	salaryParser *service.SalaryParser,
	skillParser *service.SkillParser,
	dateParser *service.DateParser) *Dice

func (*Dice) Crawl

func (dice *Dice) Crawl()

Crawl starts the crawling process. It is the only method that callers outside this type need to use.

type DiceTagCrawler

type DiceTagCrawler struct {
	Skills []string
	Url    string
	// contains filtered or unexported fields
}

func NewDiceTagCrawler

func NewDiceTagCrawler(skillWriter chan string, skillParser *service.SkillParser) *DiceTagCrawler

func (*DiceTagCrawler) Crawl

func (dice *DiceTagCrawler) Crawl()

Crawl starts the crawling process. It is the only method that callers outside this type need to use.

type IndeedJobCrawler

type IndeedJobCrawler struct {
	JobWriter chan structures.JobDetail
	Storage   service.Storage
	Skills    []string
	Url       string
	Host      string
	Client    http.Client
	// contains filtered or unexported fields
}

func NewIndeedJobCrawler

func NewIndeedJobCrawler(
	salaryParser service.ISalaryParser,
	skillParser service.ISkillParser,
	dateParser service.IDateParser) *IndeedJobCrawler

func (*IndeedJobCrawler) Crawl

func (r *IndeedJobCrawler) Crawl()

type LinkedIn

type LinkedIn struct {
	JobWriter chan structures.JobDetail
	Search    **elastic.Client
	Skills    []string
	Url       string
}

func (*LinkedIn) Crawl

func (l *LinkedIn) Crawl()

type RemoteWork

type RemoteWork struct {
	Host               string
	Url                string
	Search             **elastic.Client
	SearchWriteChannel chan structures.JobDetail
}

This implementation is not complete. The site lists expired jobs and provides too little information, so it is probably not worth indexing them.

func (*RemoteWork) Crawl

func (rw *RemoteWork) Crawl()

type StackOverflow

type StackOverflow struct {
	JobWriter chan structures.JobDetail
	Storage   service.Storage
	Skills    []string
	Url       string
	Host      string
	// contains filtered or unexported fields
}

func NewStackOverflowJobCrawler

func NewStackOverflowJobCrawler(salaryParser *service.SalaryParser, skillParser *service.SkillParser, dateParser *service.DateParser) *StackOverflow

func (*StackOverflow) Crawl

func (r *StackOverflow) Crawl()

Crawl starts the crawling process. It is the only method that callers outside this type need to use.

type StackOverflowTagCrawler

type StackOverflowTagCrawler struct {
	Host string
	Url  string
	// contains filtered or unexported fields
}

func NewStackOverflowTagCrawler

func NewStackOverflowTagCrawler(skillWriter chan string, skillParser *service.SkillParser) *StackOverflowTagCrawler

func (*StackOverflowTagCrawler) Crawl

func (r *StackOverflowTagCrawler) Crawl()

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL