newsbot-api/services/ffxiv_test.go
James Tombleson 11892b9a7b
Features/ffxiv (#6)
* starting the ffxiv reader

* working on getting the standard interface for sources based on the work for ffxiv

* got more of ffxiv working and updated tests

* Author and Description can be extracted and validated with tests

* added uuid package

* ffxiv core logic is working and tests updated to reflect it.

* Updated the scheduler with the current sources and moved them from main

* updated reddit to allow modern Go to talk to the endpoint with a debug flag

* gave the func a better name

* cleaned up main

* Moved cache to its own package and updated tests

* moved config to its own package and added basic tests

* updated imports

* minor update

* interface update and cache model update

* updated the scheduler for basic services. No DB calls yet

* updated db calls

* bypassed the reddit test as it's flaky in GitHub
2022-04-29 13:02:25 -07:00


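// These tests exercise the FFXIV collector client against the "na" news feed:
// building the parser, pulling the feed, extracting the thumbnail, publish
// date, description, author, tags, title, and author image from a post, and
// running CheckSource.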
package services_test

import (
    "testing"

    ffxiv "github.com/jtom38/newsbot/collector/services"
)

func TestFfxivGetParser(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    _, err := fc.GetParser()
    if err != nil {
        panic(err)
    }
}

func TestFfxivPullFeed(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    if len(links) == 0 {
        panic("expected links to come back but got 0")
    }
}

// ExtractThumbnail and the remaining extraction tests assume PullFeed returns
// at least one link; the first post in the feed is the page under test.
func TestFfxivExtractThumbnail(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    thumb, err := fc.ExtractThumbnail(page)
    if err != nil {
        panic(err)
    }
    if thumb == "" {
        panic("expected a link but got nothing.")
    }
}

func TestFfxivExtractPubDate(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    _, err = fc.ExtractPubDate(page)
    if err != nil {
        panic(err)
    }
}

func TestFfxivExtractDescription(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    _, err = fc.ExtractDescription(page)
    if err != nil {
        panic(err)
    }
}

func TestFfxivExtractAuthor(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    author, err := fc.ExtractAuthor(page)
    if err != nil {
        panic(err)
    }
    if author == "" {
        panic("failed to locate the author name")
    }
}

func TestFfxivExtractTags(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    res, err := fc.ExtractTags(page)
    if err != nil {
        panic(err)
    }
    if res == "" {
        panic("failed to locate the tags")
    }
}

func TestFfxivExtractTitle(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    res, err := fc.ExtractTitle(page)
    if err != nil {
        panic(err)
    }
    if res == "" {
        panic("failed to locate the title")
    }
}

func TestFfxivExtractAuthorImage(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    links, err := fc.PullFeed(parser)
    if err != nil {
        panic(err)
    }
    page := fc.GetPage(parser, links[0])
    defer page.Close()
    res, err := fc.ExtractAuthorImage(page)
    if err != nil {
        panic(err)
    }
    if res == "" {
        panic("failed to locate the author image")
    }
}

// TestFfxivCheckSource runs the client's CheckSource routine against the NA region.
func TestFfxivCheckSource(t *testing.T) {
    fc := ffxiv.NewFFXIVClient("na")
    fc.CheckSource()
}
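
// The tests above appear to hit the live feed, so they carry the same
// flakiness risk the commit notes for the bypassed reddit test. The guard
// below is a sketch of one way to keep them out of quick CI runs, not part of
// the original file: the test name is hypothetical, and only the standard
// testing.Short / -short convention plus calls already used in this file are
// assumed.
func TestFfxivPullFeedShortSkip(t *testing.T) {
    if testing.Short() {
        t.Skip("skipping network-bound FFXIV feed test in -short mode")
    }
    fc := ffxiv.NewFFXIVClient("na")
    parser := fc.GetBrowser()
    defer parser.Close()
    if _, err := fc.PullFeed(parser); err != nil {
        t.Fatal(err)
    }
}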