Commit 06bb7cf

Merge pull request #10 from p1ass/fix-readme

Fix example

p1ass authored Aug 17, 2019
2 parents: b891886 + 1275e05

Showing 1 changed file with 6 additions and 7 deletions.

README.md: 6 additions & 7 deletions

@@ -14,11 +14,10 @@ go get -u github.com/p1ass/feeder
 import "github.com/p1ass/feeder"
 
 func crawl(){
-	rssCrawler := feeder.NewRSSCrawler("https://example.com/rss")
-	qiitaCrawler := feeder.NewQiitaCrawler("https://qiita.com/api/v2/users/plus_kyoto/items")
+	rss1 := feeder.NewRSSCrawler("https://example.com/rss1")
+	rss2 := feeder.NewRSSCrawler("https://example.com/rss2")
 
-	// Crawl data using goroutine.
-	items, err := feeder.Crawl(rssCrawler, qiitaCrawler)
+	items, err := feeder.Crawl(rss1, rss2)
 
 	feed := &feeder.Feed{
 		Title: "My feeds",

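Read as a whole, the updated example crawls two RSS sources concurrently and merges the results into one feed. A minimal, self-contained sketch of that flow, assuming the `Items` field on `feeder.Feed` and the `ToRSS` serialization method (neither appears in this diff):

```go
package main

import (
	"fmt"
	"log"

	"github.com/p1ass/feeder"
)

func main() {
	rss1 := feeder.NewRSSCrawler("https://example.com/rss1")
	rss2 := feeder.NewRSSCrawler("https://example.com/rss2")

	// Crawl both sources concurrently and merge their items.
	items, err := feeder.Crawl(rss1, rss2)
	if err != nil {
		log.Fatal(err)
	}

	feed := &feeder.Feed{
		Title: "My feeds",
		Items: items, // assumed field: attach the merged items to the feed
	}

	// ToRSS is an assumption about the serialization API;
	// adjust to the version of the library you use.
	rss, err := feed.ToRSS()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(rss)
}
```
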
@@ -44,7 +43,7 @@ func crawl(){
 You can create your own crawler by implementing `feeder.Crawler`.
 ```go
 type Crawler interface {
-	Fetch() (*Items, error)
+	Crawl() ([]*Item, error)
 }
 ```

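Under the interface as shown after this change, any type with a matching `Crawl` method can be passed to `feeder.Crawl`. A hypothetical sketch (the `staticCrawler` type and the `Title` field on `feeder.Item` are assumptions, not part of this diff):

```go
package crawler

import "github.com/p1ass/feeder"

// staticCrawler is a hypothetical Crawler that returns one fixed item,
// useful as a test double or a starting point for a real implementation.
type staticCrawler struct{}

func (c *staticCrawler) Crawl() ([]*feeder.Item, error) {
	return []*feeder.Item{
		{Title: "Hello, feeder"}, // Title is assumed, not shown in this diff
	}, nil
}
```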

@@ -70,7 +69,7 @@ type qiitaCrawler struct {
 	URL string
 }
 
-func (crawler *qiitaCrawler) Fetch() (*feeder.Items, error) {
+func (crawler *qiitaCrawler) Fetch() ([]*feeder.Item, error) {
 	resp, err := http.Get(crawler.URL)
 	if err != nil {
 		return nil, errors.Wrap(err, "failed to get response from qiita.")
@@ -86,7 +85,7 @@ func (crawler *qiitaCrawler) Fetch() (*feeder.Items, error) {
 	for _, i := range qiita {
 		items = append(items, convertQiitaToItem(i))
 	}
-	return &feeder.Items{items}, nil
+	return items, nil
 }
 
 func convertQiitaToItem(q *qiitaResponse) *feeder.Item {

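The hunks above elide the JSON decoding and the body of `convertQiitaToItem`. A rough, self-contained reconstruction under stated assumptions (the `qiitaResponse` fields and the `Title`/`Link` fields on `feeder.Item` are guesses based on the public Qiita API and the surrounding README, not on this diff):

```go
package qiita

import (
	"encoding/json"
	"net/http"

	"github.com/p1ass/feeder"
	"github.com/pkg/errors"
)

type qiitaCrawler struct {
	URL string
}

// qiitaResponse models the subset of the API response used here.
// The field names are assumptions based on the public Qiita v2 API.
type qiitaResponse struct {
	Title string `json:"title"`
	URL   string `json:"url"`
}

func (crawler *qiitaCrawler) Fetch() ([]*feeder.Item, error) {
	resp, err := http.Get(crawler.URL)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get response from qiita.")
	}
	defer resp.Body.Close()

	var qiita []*qiitaResponse
	if err := json.NewDecoder(resp.Body).Decode(&qiita); err != nil {
		return nil, errors.Wrap(err, "failed to decode response body.")
	}

	items := []*feeder.Item{}
	for _, i := range qiita {
		items = append(items, convertQiitaToItem(i))
	}
	return items, nil
}

// convertQiitaToItem maps one API response onto a feed item.
// The Item fields used here are assumptions.
func convertQiitaToItem(q *qiitaResponse) *feeder.Item {
	return &feeder.Item{
		Title: q.Title,
		Link:  &feeder.Link{Href: q.URL},
	}
}
```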
