Features/subscription/features for portal (#34)

* added a route to delete a subscription by its ID

* added a route to find a record by its name and source

* added a route to query Discord webhooks by server and channel name

* smoke-tested the endpoints; they look good enough for further testing

* updated some subscription routes and formatted files

* removed the debug binary file

* replaced panic calls in tests with t.Error

* regenerated the swagger docs (swag)
James Tombleson 2022-12-04 08:49:17 -08:00 committed by GitHub
parent c161658487
commit ff4075383a
34 changed files with 478 additions and 291 deletions
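
For orientation, the new subscription endpoints described in the commit message map onto the chi router used by this API roughly as sketched below. This is a sketch only: ListSubscriptions, GetSubscriptionsByDiscordId, GetSubscriptionsBySourceId and newDiscordWebHookSubscription are handlers that appear later in this diff, while the mount helper and the delete handler name are illustrative assumptions.

```go
// Sketch only: how the routes touched by this commit might be registered.
// The function name and the delete handler are hypothetical; the paths come
// from the generated swagger docs in this commit.
func (s *Server) mountSubscriptionRoutes(r chi.Router) {
	r.Get("/subscriptions", s.ListSubscriptions)
	r.Get("/subscriptions/byDiscordId", s.GetSubscriptionsByDiscordId)
	r.Get("/subscriptions/bySourceId", s.GetSubscriptionsBySourceId)
	r.Delete("/subscriptions/discord/webhook/delete", s.deleteDiscordWebHookSubscription) // hypothetical name
	r.Post("/subscriptions/new/discordwebhook", s.newDiscordWebHookSubscription)
}
```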

.gitignore
View File

@ -1,5 +1,6 @@
.env .env
dev.session.sql dev.session.sql
__debug_bin
.vscode .vscode

View File

@ -70,7 +70,7 @@ const docTemplate = `{
"responses": {} "responses": {}
} }
}, },
"/articles/{id}": { "/articles/{ID}": {
"get": { "get": {
"produces": [ "produces": [
"application/json" "application/json"
@ -569,7 +569,6 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -582,7 +581,6 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -604,7 +602,6 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -644,9 +641,6 @@ const docTemplate = `{
"/subscriptions/new/discordwebhook": { "/subscriptions/new/discordwebhook": {
"post": { "post": {
"tags": [ "tags": [
"Config",
"Source",
"Discord",
"Subscription" "Subscription"
], ],
"summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.", "summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.",

View File

@ -61,7 +61,7 @@
"responses": {} "responses": {}
} }
}, },
"/articles/{id}": { "/articles/{ID}": {
"get": { "get": {
"produces": [ "produces": [
"application/json" "application/json"
@ -560,7 +560,6 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -573,7 +572,6 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -595,7 +593,6 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Config",
"Subscription" "Subscription"
], ],
"summary": "Returns the top 100 entries from the queue to be processed.", "summary": "Returns the top 100 entries from the queue to be processed.",
@ -635,9 +632,6 @@
"/subscriptions/new/discordwebhook": { "/subscriptions/new/discordwebhook": {
"post": { "post": {
"tags": [ "tags": [
"Config",
"Source",
"Discord",
"Subscription" "Subscription"
], ],
"summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.", "summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.",

View File

@ -12,7 +12,7 @@ paths:
summary: Lists the top 50 records summary: Lists the top 50 records
tags: tags:
- Articles - Articles
/articles/{id}: /articles/{ID}:
get: get:
parameters: parameters:
- description: uuid - description: uuid
@ -391,7 +391,6 @@ paths:
responses: {} responses: {}
summary: Returns the top 100 entries from the queue to be processed. summary: Returns the top 100 entries from the queue to be processed.
tags: tags:
- Config
- Subscription - Subscription
/subscriptions/byDiscordId: /subscriptions/byDiscordId:
get: get:
@ -406,7 +405,6 @@ paths:
responses: {} responses: {}
summary: Returns the top 100 entries from the queue to be processed. summary: Returns the top 100 entries from the queue to be processed.
tags: tags:
- Config
- Subscription - Subscription
/subscriptions/bySourceId: /subscriptions/bySourceId:
get: get:
@ -421,7 +419,6 @@ paths:
responses: {} responses: {}
summary: Returns the top 100 entries from the queue to be processed. summary: Returns the top 100 entries from the queue to be processed.
tags: tags:
- Config
- Subscription - Subscription
/subscriptions/discord/webhook/delete: /subscriptions/discord/webhook/delete:
delete: delete:
@ -454,8 +451,5 @@ paths:
responses: {} responses: {}
summary: Creates a new subscription to link a post from a Source to a DiscordWebHook. summary: Creates a new subscription to link a post from a Source to a DiscordWebHook.
tags: tags:
- Config
- Source
- Discord
- Subscription - Subscription
swagger: "2.0" swagger: "2.0"

View File

@ -20,4 +20,3 @@ type Sources interface {
ExtractTags(page *rod.Page) (string, error) ExtractTags(page *rod.Page) (string, error)
ExtractTitle(page *rod.Page) (string, error) ExtractTitle(page *rod.Page) (string, error)
} }

View File

@ -10,7 +10,7 @@ type CacheItem struct {
// Group defines what it should be a reference to. // Group defines what it should be a reference to.
// youtube, reddit, ect // youtube, reddit, ect
Group string Group string
Expires time.Time Expires time.Time
IsTainted bool IsTainted bool
} }

View File

@ -6,85 +6,85 @@ import (
// Articles represents the model for an Article // Articles represents the model for an Article
type Articles struct { type Articles struct {
ID uint `json:"ID"` ID uint `json:"ID"`
SourceID uint `json:"sourceId"` SourceID uint `json:"sourceId"`
Tags string `json:"tags"` Tags string `json:"tags"`
Title string `json:"title"` Title string `json:"title"`
Url string `json:"url"` Url string `json:"url"`
PubDate time.Time `json:"pubdate"` PubDate time.Time `json:"pubdate"`
Video string `json:"video"` Video string `json:"video"`
VideoHeight uint16 `json:"videoHeight"` VideoHeight uint16 `json:"videoHeight"`
VideoWidth uint16 `json:"videoWidth"` VideoWidth uint16 `json:"videoWidth"`
Thumbnail string `json:"thumbnail"` Thumbnail string `json:"thumbnail"`
Description string `json:"description"` Description string `json:"description"`
AuthorName string `json:"authorName"` AuthorName string `json:"authorName"`
AuthorImage string `json:"authorImage"` AuthorImage string `json:"authorImage"`
} }
type DiscordQueue struct { type DiscordQueue struct {
ID uint `json:"ID"` ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"` CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"` UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"` DeletedAt time.Time `json:"DeletedAt"`
ArticleId string `json:"articleId"` ArticleId string `json:"articleId"`
} }
type DiscordWebHooks struct { type DiscordWebHooks struct {
ID uint `json:"ID"` ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"` CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"` UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"` DeletedAt time.Time `json:"DeletedAt"`
Name string `json:"name"` Name string `json:"name"`
Key string `json:"key"` Key string `json:"key"`
Url string `json:"url"` Url string `json:"url"`
Server string `json:"server"` Server string `json:"server"`
Channel string `json:"channel"` Channel string `json:"channel"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
} }
type Icons struct { type Icons struct {
ID uint `json:"ID"` ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"` CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"` UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"` DeletedAt time.Time `json:"DeletedAt"`
FileName string `json:"fileName"` FileName string `json:"fileName"`
Site string `json:"site"` Site string `json:"site"`
} }
type Settings struct { type Settings struct {
ID uint `json:"ID"` ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"` CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"` UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"` DeletedAt time.Time `json:"DeletedAt"`
Key string `json:"key"` Key string `json:"key"`
Value string `json:"value"` Value string `json:"value"`
Options string `json:"options"` Options string `json:"options"`
} }
type Sources struct { type Sources struct {
ID uint `json:"ID"` ID uint `json:"ID"`
Site string `json:"site"` Site string `json:"site"`
Name string `json:"name"` Name string `json:"name"`
Source string `json:"source"` Source string `json:"source"`
Type string `json:"type"` Type string `json:"type"`
Value string `json:"value"` Value string `json:"value"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
Url string `json:"url"` Url string `json:"url"`
Tags string `json:"tags"` Tags string `json:"tags"`
} }
type SourceLinks struct { type SourceLinks struct {
ID uint `json:"ID"` ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"` CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"` UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"` DeletedAt time.Time `json:"DeletedAt"`
SourceID uint `json:"sourceId"` SourceID uint `json:"sourceId"`
SourceType string `json:"sourceType"` SourceType string `json:"sourceType"`
SourceName string `json:"sourceName"` SourceName string `json:"sourceName"`
DiscordID uint `json:"discordId"` DiscordID uint `json:"discordId"`
DiscordName string `json:"discordName"` DiscordName string `json:"discordName"`
} }

View File

@ -2,34 +2,34 @@ package model
// This is the root Json object. It does not contain data that we care about though. // This is the root Json object. It does not contain data that we care about though.
type RedditJsonContent struct { type RedditJsonContent struct {
Kind string `json:"kind"` Kind string `json:"kind"`
Data RedditJsonContentData `json:"data"` Data RedditJsonContentData `json:"data"`
} }
type RedditJsonContentData struct { type RedditJsonContentData struct {
After string `json:"after"` After string `json:"after"`
Dist int `json:"dist"` Dist int `json:"dist"`
Modhash string `json:"modhash"` Modhash string `json:"modhash"`
Children []RedditJsonContentChildren `json:"children"` Children []RedditJsonContentChildren `json:"children"`
} }
type RedditJsonContentChildren struct { type RedditJsonContentChildren struct {
Kind string `json:"kind"` Kind string `json:"kind"`
Data RedditPost `json:"data"` Data RedditPost `json:"data"`
} }
// RedditPost contains the information that was posted by a user. // RedditPost contains the information that was posted by a user.
type RedditPost struct { type RedditPost struct {
Subreddit string `json:"subreddit"` Subreddit string `json:"subreddit"`
Title string `json:"title"` Title string `json:"title"`
Content string `json:"selftext"` Content string `json:"selftext"`
ContentHtml string `json:"selftext_html"` ContentHtml string `json:"selftext_html"`
Author string `json:"author"` Author string `json:"author"`
Permalink string `json:"permalink"` Permalink string `json:"permalink"`
IsVideo bool `json:"is_video"` IsVideo bool `json:"is_video"`
Media RedditPostMedia `json:"media"` Media RedditPostMedia `json:"media"`
Url string `json:"url"` Url string `json:"url"`
UrlOverriddenByDest string `json:"url_overridden_by_dest"` UrlOverriddenByDest string `json:"url_overridden_by_dest"`
Thumbnail string `json:"thumbnail"` Thumbnail string `json:"thumbnail"`
} }
@ -41,14 +41,14 @@ type RedditPostMedia struct {
// RedditVideo contains information about the video in the post. // RedditVideo contains information about the video in the post.
type RedditPostMediaRedditVideo struct { type RedditPostMediaRedditVideo struct {
BitrateKbps int `json:"bitrate_kpbs"` BitrateKbps int `json:"bitrate_kpbs"`
FallBackUrl string `json:"fallback_url"` FallBackUrl string `json:"fallback_url"`
Height int `json:"height"` Height int `json:"height"`
Width int `json:"width"` Width int `json:"width"`
ScrubberMediaUrl string `json:"scrubber_media_url"` ScrubberMediaUrl string `json:"scrubber_media_url"`
DashUrl string `json:"dash_url"` DashUrl string `json:"dash_url"`
Duration int `json:"duration"` Duration int `json:"duration"`
HlsUrl string `json:"hls_url"` HlsUrl string `json:"hls_url"`
IsGif bool `json:"is_gif"` IsGif bool `json:"is_gif"`
TranscodingStatus string `json:"transcoding_status"` TranscodingStatus string `json:"transcoding_status"`
} }

View File

@ -35,7 +35,7 @@ func (s *Server) listArticles(w http.ResponseWriter, r *http.Request) {
// @Param id path string true "uuid" // @Param id path string true "uuid"
// @Produce application/json // @Produce application/json
// @Tags Articles // @Tags Articles
// @Router /articles/{id} [get] // @Router /articles/{ID} [get]
func (s *Server) getArticleById(w http.ResponseWriter, r *http.Request) { func (s *Server) getArticleById(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")

View File

@ -44,8 +44,7 @@ func (s *Server) GetDiscordWebHooks(w http.ResponseWriter, r *http.Request) {
func (s *Server) GetDiscordWebHooksById(w http.ResponseWriter, r *http.Request) { func (s *Server) GetDiscordWebHooksById(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
query := r.URL.Query() _id := chi.URLParam(r, "ID")
_id := query["id"][0]
if _id == "" { if _id == "" {
http.Error(w, "id is missing", http.StatusBadRequest) http.Error(w, "id is missing", http.StatusBadRequest)
return return
@ -135,7 +134,7 @@ func (s *Server) NewDiscordWebHook(w http.ResponseWriter, r *http.Request) {
http.Error(w, "invalid url", http.StatusBadRequest) http.Error(w, "invalid url", http.StatusBadRequest)
return return
} }
if _server == ""{ if _server == "" {
http.Error(w, "server is missing", http.StatusBadRequest) http.Error(w, "server is missing", http.StatusBadRequest)
} }
if _channel == "" { if _channel == "" {
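
Note that GetDiscordWebHooksById now reads the ID from the URL path via chi.URLParam, which only resolves if the route is registered with a matching {ID} placeholder. A minimal sketch, with the mount path assumed:

```go
// Sketch: the {ID} placeholder must match the name passed to chi.URLParam(r, "ID").
r.Route("/discord/webhooks", func(r chi.Router) { // mount path is an assumption
	r.Get("/{ID}", s.GetDiscordWebHooksById)
})
```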

View File

@ -1,2 +1 @@
package routes_test package routes_test

View File

@ -12,7 +12,7 @@ import (
// GetSubscriptions // GetSubscriptions
// @Summary Returns the top 100 entries from the queue to be processed. // @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json // @Produce application/json
// @Tags Config, Subscription // @Tags Subscription
// @Router /subscriptions [get] // @Router /subscriptions [get]
func (s *Server) ListSubscriptions(w http.ResponseWriter, r *http.Request) { func (s *Server) ListSubscriptions(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
@ -36,7 +36,7 @@ func (s *Server) ListSubscriptions(w http.ResponseWriter, r *http.Request) {
// @Summary Returns the top 100 entries from the queue to be processed. // @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json // @Produce application/json
// @Param id query string true "id" // @Param id query string true "id"
// @Tags Config, Subscription // @Tags Subscription
// @Router /subscriptions/byDiscordId [get] // @Router /subscriptions/byDiscordId [get]
func (s *Server) GetSubscriptionsByDiscordId(w http.ResponseWriter, r *http.Request) { func (s *Server) GetSubscriptionsByDiscordId(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
@ -73,7 +73,7 @@ func (s *Server) GetSubscriptionsByDiscordId(w http.ResponseWriter, r *http.Requ
// @Summary Returns the top 100 entries from the queue to be processed. // @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json // @Produce application/json
// @Param id query string true "id" // @Param id query string true "id"
// @Tags Config, Subscription // @Tags Subscription
// @Router /subscriptions/bySourceId [get] // @Router /subscriptions/bySourceId [get]
func (s *Server) GetSubscriptionsBySourceId(w http.ResponseWriter, r *http.Request) { func (s *Server) GetSubscriptionsBySourceId(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
@ -110,7 +110,7 @@ func (s *Server) GetSubscriptionsBySourceId(w http.ResponseWriter, r *http.Reque
// @Summary Creates a new subscription to link a post from a Source to a DiscordWebHook. // @Summary Creates a new subscription to link a post from a Source to a DiscordWebHook.
// @Param discordWebHookId query string true "discordWebHookId" // @Param discordWebHookId query string true "discordWebHookId"
// @Param sourceId query string true "sourceId" // @Param sourceId query string true "sourceId"
// @Tags Config, Source, Discord, Subscription // @Tags Subscription
// @Router /subscriptions/new/discordwebhook [post] // @Router /subscriptions/new/discordwebhook [post]
func (s *Server) newDiscordWebHookSubscription(w http.ResponseWriter, r *http.Request) { func (s *Server) newDiscordWebHookSubscription(w http.ResponseWriter, r *http.Request) {
// Extract the values given // Extract the values given
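
Per the @Param annotations above, the new-subscription endpoint takes both IDs as query parameters. A hedged client sketch (imports fmt and net/http; the helper name and base URL are assumptions, the parameter names come from the annotations):

```go
// Sketch: calling the documented endpoint against a running instance.
func subscribeSourceToWebhook(baseUrl, discordWebHookId, sourceId string) error {
	uri := fmt.Sprintf("%v/subscriptions/new/discordwebhook?discordWebHookId=%v&sourceId=%v",
		baseUrl, discordWebHookId, sourceId)
	resp, err := http.Post(uri, "application/json", nil)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %v", resp.Status)
	}
	return nil
}
```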

View File

@ -6,24 +6,24 @@ import (
"github.com/jtom38/newsbot/collector/domain/model" "github.com/jtom38/newsbot/collector/domain/model"
) )
type CacheClient struct{ type CacheClient struct {
group string group string
DefaultTimer time.Duration DefaultTimer time.Duration
} }
func NewCacheClient(group string) CacheClient { func NewCacheClient(group string) CacheClient {
return CacheClient{ return CacheClient{
group: group, group: group,
DefaultTimer: time.Hour, DefaultTimer: time.Hour,
} }
} }
func (cc *CacheClient) Insert(key string, value string) { func (cc *CacheClient) Insert(key string, value string) {
item := model.CacheItem{ item := model.CacheItem{
Key: key, Key: key,
Value: value, Value: value,
Group: cc.group, Group: cc.group,
Expires: time.Now().Add(1 * time.Hour), Expires: time.Now().Add(1 * time.Hour),
IsTainted: false, IsTainted: false,
} }
cacheStorage = append(cacheStorage, &item) cacheStorage = append(cacheStorage, &item)
@ -31,8 +31,12 @@ func (cc *CacheClient) Insert(key string, value string) {
func (cc *CacheClient) FindByKey(key string) (*model.CacheItem, error) { func (cc *CacheClient) FindByKey(key string) (*model.CacheItem, error) {
for _, item := range cacheStorage { for _, item := range cacheStorage {
if item.Group != cc.group { continue } if item.Group != cc.group {
if item.Key != key { continue } continue
}
if item.Key != key {
continue
}
// if it was tainted, renew the timer and remove the taint as this record was still needed // if it was tainted, renew the timer and remove the taint as this record was still needed
if item.IsTainted { if item.IsTainted {
@ -47,8 +51,12 @@ func (cc *CacheClient) FindByKey(key string) (*model.CacheItem, error) {
func (cc *CacheClient) FindByValue(value string) (*model.CacheItem, error) { func (cc *CacheClient) FindByValue(value string) (*model.CacheItem, error) {
for _, item := range cacheStorage { for _, item := range cacheStorage {
if item.Group != cc.group { continue } if item.Group != cc.group {
if item.Value != value { continue } continue
}
if item.Value != value {
continue
}
// if it was tainted, renew the timer and remove the taint as this record was still needed // if it was tainted, renew the timer and remove the taint as this record was still needed
if item.IsTainted { if item.IsTainted {
@ -59,4 +67,3 @@ func (cc *CacheClient) FindByValue(value string) (*model.CacheItem, error) {
} }
return &model.CacheItem{}, ErrCacheRecordMissing return &model.CacheItem{}, ErrCacheRecordMissing
} }
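
For reference, the cache client reformatted above is used like this; the group and key values are examples only:

```go
// Sketch: insert a value and read it back within the same cache group.
cc := cache.NewCacheClient("youtube")
cc.Insert("channelId", "UC_example")
item, err := cc.FindByKey("channelId")
if err != nil {
	// ErrCacheRecordMissing comes back when no entry matches the group/key.
	log.Println(err)
} else {
	fmt.Println(item.Value)
}
```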

View File

@ -18,14 +18,18 @@ func TestInsert(t *testing.T) {
func TestFindGroupMissing(t *testing.T) { func TestFindGroupMissing(t *testing.T) {
cache := cache.NewCacheClient("faker") cache := cache.NewCacheClient("faker")
_, err := cache.FindByKey("UnitTesting") _, err := cache.FindByKey("UnitTesting")
if err == nil { panic("Nothing was appended with the requested group.") } if err == nil {
panic("Nothing was appended with the requested group.")
}
} }
func TestFindGroupExists(t *testing.T) { func TestFindGroupExists(t *testing.T) {
cache := cache.NewCacheClient("Testing") cache := cache.NewCacheClient("Testing")
cache.Insert("UnitTesting", "Something") cache.Insert("UnitTesting", "Something")
_, err := cache.FindByKey("UnitTesting") _, err := cache.FindByKey("UnitTesting")
if err != nil { panic("") } if err != nil {
panic("")
}
} }
func TestCacheStorage(t *testing.T) { func TestCacheStorage(t *testing.T) {
@ -35,6 +39,7 @@ func TestCacheStorage(t *testing.T) {
cache := cache.NewCacheClient("Testing") cache := cache.NewCacheClient("Testing")
_, err := cache.FindByKey("UnitTesting02") _, err := cache.FindByKey("UnitTesting02")
if err != nil { panic("expected to find the value")} if err != nil {
panic("expected to find the value")
}
} }

View File

@ -9,7 +9,7 @@ import (
// When a record becomes tainted, it needs to be renewed or it will be dropped from the cache. // When a record becomes tainted, it needs to be renewed or it will be dropped from the cache.
// If a record is tainted and used again, the taint will be removed and a new Expires value will be set. // If a record is tainted and used again, the taint will be removed and a new Expires value will be set.
// If its not renewed, it will be dropped. // If its not renewed, it will be dropped.
type CacheAgeMonitor struct {} type CacheAgeMonitor struct{}
func NewCacheAgeMonitor() CacheAgeMonitor { func NewCacheAgeMonitor() CacheAgeMonitor {
return CacheAgeMonitor{} return CacheAgeMonitor{}
@ -39,7 +39,9 @@ func (cam CacheAgeMonitor) CheckExpiredEntries() {
func (cam CacheAgeMonitor) removeEntry(index int) []*model.CacheItem { func (cam CacheAgeMonitor) removeEntry(index int) []*model.CacheItem {
var temp []*model.CacheItem var temp []*model.CacheItem
for i, item := range cacheStorage { for i, item := range cacheStorage {
if i != index { temp = append(temp, item )} if i != index {
temp = append(temp, item)
}
} }
return temp return temp
} }

View File

@ -49,6 +49,17 @@ func (cc *ConfigClient) GetConfig(key string) string {
return res return res
} }
// Looks for a value in the env and will panic if it does not exist.
func (c ConfigClient) MustGetString(key string) string {
res, filled := os.LookupEnv(key)
if !filled {
msg := fmt.Sprintf("No value was found for '%v'", key)
panic(msg)
}
return res
}
func (cc *ConfigClient) GetFeature(flag string) (bool, error) { func (cc *ConfigClient) GetFeature(flag string) (bool, error) {
cc.RefreshEnv() cc.RefreshEnv()
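
The new MustGetString helper is meant for configuration that must be present at startup; a short usage sketch (the variable name is an example, not one defined by this commit):

```go
// Sketch: panics during startup if the required variable is unset.
cfg := config.New()
addr := cfg.MustGetString("SERVER_ADDRESS") // example key
log.Printf("listening on %v", addr)
```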

View File

@ -1,8 +1,8 @@
package config_test package config_test
import ( import (
"testing"
"os" "os"
"testing"
"github.com/jtom38/newsbot/collector/services/config" "github.com/jtom38/newsbot/collector/services/config"
) )
@ -15,6 +15,8 @@ func TestGetConfigExpectNull(t *testing.T) {
cc := config.New() cc := config.New()
os.Setenv(config.REDDIT_PULL_HOT, "") os.Setenv(config.REDDIT_PULL_HOT, "")
res := cc.GetConfig(config.REDDIT_PULL_HOT) res := cc.GetConfig(config.REDDIT_PULL_HOT)
if res != "" { panic("expected blank")} if res != "" {
panic("expected blank")
}
} }

View File

@ -9,18 +9,20 @@ import (
) )
var FFXIVRecord database.Source = database.Source{ var FFXIVRecord database.Source = database.Source{
ID: uuid.New(), ID: uuid.New(),
Site: "ffxiv", Site: "ffxiv",
Name: "Final Fantasy XIV - NA", Name: "Final Fantasy XIV - NA",
Source: "ffxiv", Source: "ffxiv",
Url: "https://na.finalfantasyxiv.com/lodestone/", Url: "https://na.finalfantasyxiv.com/lodestone/",
Tags: "ffxiv, final, fantasy, xiv, na, lodestone", Tags: "ffxiv, final, fantasy, xiv, na, lodestone",
} }
func TestFfxivGetParser(t *testing.T) { func TestFfxivGetParser(t *testing.T) {
fc := ffxiv.NewFFXIVClient(FFXIVRecord) fc := ffxiv.NewFFXIVClient(FFXIVRecord)
_, err := fc.GetParser() _, err := fc.GetParser()
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestFfxivPullFeed(t *testing.T) { func TestFfxivPullFeed(t *testing.T) {
@ -30,8 +32,12 @@ func TestFfxivPullFeed(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
if len(links) == 0 { panic("expected links to come back but got 0") } t.Error(err)
}
if len(links) == 0 {
t.Error("expected links to come back but got 0")
}
} }
@ -42,14 +48,20 @@ func TestFfxivExtractThumbnail(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
thumb, err := fc.ExtractThumbnail(page) thumb, err := fc.ExtractThumbnail(page)
if err != nil { panic(err) } if err != nil {
if thumb == "" { panic("expected a link but got nothing.")} t.Error(err)
}
if thumb == "" {
t.Error("expected a link but got nothing.")
}
} }
func TestFfxivExtractPubDate(t *testing.T) { func TestFfxivExtractPubDate(t *testing.T) {
@ -59,13 +71,17 @@ func TestFfxivExtractPubDate(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
_, err = fc.ExtractPubDate(page) _, err = fc.ExtractPubDate(page)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestFfxivExtractDescription(t *testing.T) { func TestFfxivExtractDescription(t *testing.T) {
@ -75,13 +91,17 @@ func TestFfxivExtractDescription(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
_, err = fc.ExtractDescription(page) _, err = fc.ExtractDescription(page)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestFfxivExtractAuthor(t *testing.T) { func TestFfxivExtractAuthor(t *testing.T) {
@ -91,14 +111,20 @@ func TestFfxivExtractAuthor(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
author, err := fc.ExtractAuthor(page) author, err := fc.ExtractAuthor(page)
if err != nil { panic(err) } if err != nil {
if author == "" { panic("failed to locate the author name") } t.Error(err)
}
if author == "" {
t.Error("failed to locate the author name")
}
} }
func TestFfxivExtractTags(t *testing.T) { func TestFfxivExtractTags(t *testing.T) {
@ -108,14 +134,20 @@ func TestFfxivExtractTags(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
res, err := fc.ExtractTags(page) res, err := fc.ExtractTags(page)
if err != nil { panic(err) } if err != nil {
if res == "" {panic("failed to locate the tags")} t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
} }
func TestFfxivExtractTitle(t *testing.T) { func TestFfxivExtractTitle(t *testing.T) {
@ -125,14 +157,20 @@ func TestFfxivExtractTitle(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
res, err := fc.ExtractTitle(page) res, err := fc.ExtractTitle(page)
if err != nil { panic(err) } if err != nil {
if res == "" { panic("failed to locate the tags") } t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
} }
func TestFFxivExtractAuthorIamge(t *testing.T) { func TestFFxivExtractAuthorIamge(t *testing.T) {
@ -142,14 +180,20 @@ func TestFFxivExtractAuthorIamge(t *testing.T) {
defer parser.Close() defer parser.Close()
links, err := fc.PullFeed(parser) links, err := fc.PullFeed(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0]) page := fc.GetPage(parser, links[0])
defer page.Close() defer page.Close()
res, err := fc.ExtractAuthorImage(page) res, err := fc.ExtractAuthorImage(page)
if err != nil { panic(err) } if err != nil {
if res == "" { panic("failed to locate the tags") } t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
} }
func TestFfxivCheckSource(t *testing.T) { func TestFfxivCheckSource(t *testing.T) {
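
For comparison, the same pattern written with t.Fatal stops a test as soon as PullFeed fails, so the later links[0] access is never reached. A sketch mirroring the tests above; the test name is hypothetical:

```go
// Sketch: t.Fatal halts the test on error, protecting the links[0] access below.
func TestFfxivPullFeedFatal(t *testing.T) {
	fc := ffxiv.NewFFXIVClient(FFXIVRecord)
	parser, err := fc.GetParser()
	if err != nil {
		t.Fatal(err)
	}
	defer parser.Close()

	links, err := fc.PullFeed(parser)
	if err != nil {
		t.Fatal(err)
	}
	if len(links) == 0 {
		t.Fatal("expected links to come back but got 0")
	}
	page := fc.GetPage(parser, links[0])
	defer page.Close()
}
```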

View File

@ -21,18 +21,24 @@ func getHttpContent(uri string) ([]byte, error) {
} }
req, err := http.NewRequest("GET", uri, nil) req, err := http.NewRequest("GET", uri, nil)
if err != nil { return nil, err } if err != nil {
return nil, err
}
// set the user agent header to avoid kick backs.. as much // set the user agent header to avoid kick backs.. as much
req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.10; rv:75.0) Gecko/20100101 Firefox/75.0") req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.10; rv:75.0) Gecko/20100101 Firefox/75.0")
log.Printf("Requesting content from %v\n", uri) log.Printf("Requesting content from %v\n", uri)
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { log.Fatalln(err) } if err != nil {
log.Fatalln(err)
}
defer resp.Body.Close() defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body) body, err := ioutil.ReadAll(resp.Body)
if err != nil { return nil, err } if err != nil {
return nil, err
}
return body, nil return body, nil
} }

View File

@ -47,7 +47,6 @@ func NewRedditClient(Record database.Source) *RedditClient {
// os.Setenv("GODEBUG", "http2client=0") // os.Setenv("GODEBUG", "http2client=0")
//} //}
func (rc *RedditClient) GetBrowser() *rod.Browser { func (rc *RedditClient) GetBrowser() *rod.Browser {
var browser *rod.Browser var browser *rod.Browser
if path, exists := launcher.LookPath(); exists { if path, exists := launcher.LookPath(); exists {
@ -74,8 +73,6 @@ func (rc *RedditClient) GetContent() (model.RedditJsonContent, error) {
log.Printf("[Reddit] Collecting results on '%v'", rc.record.Name) log.Printf("[Reddit] Collecting results on '%v'", rc.record.Name)
content, err := getHttpContent(Url) content, err := getHttpContent(Url)
if err != nil { if err != nil {
return items, err return items, err

View File

@ -9,12 +9,12 @@ import (
) )
var RedditRecord database.Source = database.Source{ var RedditRecord database.Source = database.Source{
ID: uuid.New(), ID: uuid.New(),
Name: "dadjokes", Name: "dadjokes",
Source: "reddit", Source: "reddit",
Site: "reddit", Site: "reddit",
Url: "https://reddit.com/r/dadjokes", Url: "https://reddit.com/r/dadjokes",
Tags: "reddit, dadjokes", Tags: "reddit, dadjokes",
} }
func TestGetContent(t *testing.T) { func TestGetContent(t *testing.T) {

View File

@ -22,7 +22,7 @@ func NewRssClient(sourceRecord model.Sources) rssClient {
} }
//func (rc rssClient) ReplaceSourceRecord(source model.Sources) { //func (rc rssClient) ReplaceSourceRecord(source model.Sources) {
//rc.SourceRecord = source //rc.SourceRecord = source
//} //}
func (rc rssClient) getCacheGroup() string { func (rc rssClient) getCacheGroup() string {
@ -31,7 +31,9 @@ func (rc rssClient) getCacheGroup() string {
func (rc rssClient) GetContent() error { func (rc rssClient) GetContent() error {
feed, err := rc.PullFeed() feed, err := rc.PullFeed()
if err != nil { return err } if err != nil {
return err
}
cacheClient := cache.NewCacheClient(rc.getCacheGroup()) cacheClient := cache.NewCacheClient(rc.getCacheGroup())
@ -49,7 +51,9 @@ func (rc rssClient) PullFeed() (*gofeed.Feed, error) {
feedUri := fmt.Sprintf("%v", rc.SourceRecord.Url) feedUri := fmt.Sprintf("%v", rc.SourceRecord.Url)
fp := gofeed.NewParser() fp := gofeed.NewParser()
feed, err := fp.ParseURL(feedUri) feed, err := fp.ParseURL(feedUri)
if err != nil { return nil, err } if err != nil {
return nil, err
}
return feed, nil return feed, nil
} }

View File

@ -7,10 +7,10 @@ import (
"github.com/jtom38/newsbot/collector/services/input" "github.com/jtom38/newsbot/collector/services/input"
) )
var rssRecord = model.Sources { var rssRecord = model.Sources{
ID: 1, ID: 1,
Name: "ArsTechnica", Name: "ArsTechnica",
Url: "https://feeds.arstechnica.com/arstechnica/index", Url: "https://feeds.arstechnica.com/arstechnica/index",
} }
func TestRssClientConstructor(t *testing.T) { func TestRssClientConstructor(t *testing.T) {
@ -20,7 +20,11 @@ func TestRssClientConstructor(t *testing.T) {
func TestRssGetFeed(t *testing.T) { func TestRssGetFeed(t *testing.T) {
client := input.NewRssClient(rssRecord) client := input.NewRssClient(rssRecord)
feed, err := client.PullFeed() feed, err := client.PullFeed()
if err != nil { t.Error(err) } if err != nil {
if len(feed.Items) >= 0 { t.Error("failed to collect items from the fees")} t.Error(err)
}
if len(feed.Items) >= 0 {
t.Error("failed to collect items from the fees")
}
} }

View File

@ -104,31 +104,47 @@ func (tc *TwitchClient) GetContent() ([]database.Article, error) {
var article database.Article var article database.Article
AuthorName, err := tc.ExtractAuthor(video) AuthorName, err := tc.ExtractAuthor(video)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Authorname = sql.NullString{String: AuthorName} article.Authorname = sql.NullString{String: AuthorName}
Authorimage, err := tc.ExtractAuthorImage(user) Authorimage, err := tc.ExtractAuthorImage(user)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Authorimage = sql.NullString{String: Authorimage} article.Authorimage = sql.NullString{String: Authorimage}
article.Description, err = tc.ExtractDescription(video) article.Description, err = tc.ExtractDescription(video)
if err != nil {return items, err } if err != nil {
return items, err
}
article.Pubdate, err = tc.ExtractPubDate(video) article.Pubdate, err = tc.ExtractPubDate(video)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Sourceid = tc.SourceRecord.ID article.Sourceid = tc.SourceRecord.ID
article.Tags, err = tc.ExtractTags(video, user) article.Tags, err = tc.ExtractTags(video, user)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Thumbnail, err = tc.ExtractThumbnail(video) article.Thumbnail, err = tc.ExtractThumbnail(video)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Title, err = tc.ExtractTitle(video) article.Title, err = tc.ExtractTitle(video)
if err != nil { return items, err } if err != nil {
return items, err
}
article.Url, err = tc.ExtractUrl(video) article.Url, err = tc.ExtractUrl(video)
if err != nil { return items, err } if err != nil {
return items, err
}
items = append(items, article) items = append(items, article)
} }
@ -210,8 +226,12 @@ func (tc *TwitchClient) ExtractDescription(post helix.Video) (string, error) {
// Extracts the avatar of the author with some validation. // Extracts the avatar of the author with some validation.
func (tc *TwitchClient) ExtractAuthorImage(user helix.User) (string, error) { func (tc *TwitchClient) ExtractAuthorImage(user helix.User) (string, error) {
if user.ProfileImageURL == "" { return "", ErrMissingAuthorImage } if user.ProfileImageURL == "" {
if !strings.Contains(user.ProfileImageURL, "-profile_image-") { return "", ErrInvalidAuthorImage } return "", ErrMissingAuthorImage
}
if !strings.Contains(user.ProfileImageURL, "-profile_image-") {
return "", ErrInvalidAuthorImage
}
return user.ProfileImageURL, nil return user.ProfileImageURL, nil
} }
@ -231,6 +251,8 @@ func (tc *TwitchClient) ExtractTitle(post helix.Video) (string, error) {
} }
func (tc *TwitchClient) ExtractUrl(post helix.Video) (string, error) { func (tc *TwitchClient) ExtractUrl(post helix.Video) (string, error) {
if post.URL == "" { return "", ErrMissingUrl } if post.URL == "" {
return "", ErrMissingUrl
}
return post.URL, nil return post.URL, nil
} }

View File

@ -9,13 +9,13 @@ import (
"github.com/jtom38/newsbot/collector/services/input" "github.com/jtom38/newsbot/collector/services/input"
) )
var TwitchSourceRecord = database.Source { var TwitchSourceRecord = database.Source{
ID: uuid.New(), ID: uuid.New(),
Name: "nintendo", Name: "nintendo",
Source: "Twitch", Source: "Twitch",
} }
var TwitchInvalidRecord = database.Source { var TwitchInvalidRecord = database.Source{
ID: uuid.New(), ID: uuid.New(),
Name: "EvilNintendo", Name: "EvilNintendo",
Source: "Twitch", Source: "Twitch",
@ -115,40 +115,62 @@ func TestTwitchReturnsVideoAuthor(t *testing.T) {
func TestTwitchReturnsThumbnail(t *testing.T) { func TestTwitchReturnsThumbnail(t *testing.T) {
tc, err := input.NewTwitchClient() tc, err := input.NewTwitchClient()
if err != nil {t.Error(err) } if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord) tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login() err = tc.Login()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails() user, err := tc.GetUserDetails()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user) posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
value, err := tc.ExtractThumbnail(posts[0]) value, err := tc.ExtractThumbnail(posts[0])
if err != nil { t.Error(err) } if err != nil {
if value == "" { t.Error("uable to parse username") } t.Error(err)
}
if value == "" {
t.Error("uable to parse username")
}
} }
func TestTwitchReturnsPubDate(t *testing.T) { func TestTwitchReturnsPubDate(t *testing.T) {
tc, err := input.NewTwitchClient() tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord) tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login() err = tc.Login()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails() user, err := tc.GetUserDetails()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user) posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
date, err := tc.ExtractPubDate(posts[0]) date, err := tc.ExtractPubDate(posts[0])
log.Println(date) log.Println(date)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
} }
func TestTwitchReturnsDescription(t *testing.T) { func TestTwitchReturnsDescription(t *testing.T) {
@ -181,17 +203,25 @@ func TestTwitchReturnsDescription(t *testing.T) {
func TestTwitchReturnsAuthorImage(t *testing.T) { func TestTwitchReturnsAuthorImage(t *testing.T) {
tc, err := input.NewTwitchClient() tc, err := input.NewTwitchClient()
if err != nil {t.Error(err) } if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord) tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login() err = tc.Login()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails() user, err := tc.GetUserDetails()
if err != nil {t.Error(err) } if err != nil {
t.Error(err)
}
_, err = tc.ExtractAuthorImage(user) _, err = tc.ExtractAuthorImage(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
} }
func TestTwitchReturnsTags(t *testing.T) { func TestTwitchReturnsTags(t *testing.T) {
@ -212,10 +242,14 @@ func TestTwitchReturnsTags(t *testing.T) {
} }
posts, err := tc.GetPosts(user) posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
_, err = tc.ExtractTags(posts[0], user) _, err = tc.ExtractTags(posts[0], user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
} }
func TestTwitchReturnsTitle(t *testing.T) { func TestTwitchReturnsTitle(t *testing.T) {
@ -236,42 +270,70 @@ func TestTwitchReturnsTitle(t *testing.T) {
} }
posts, err := tc.GetPosts(user) posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
res, err := tc.ExtractTitle(posts[0]) res, err := tc.ExtractTitle(posts[0])
if err != nil { t.Error(err) } if err != nil {
if res == "" { t.Error("expected a filled string but got nil")} t.Error(err)
}
if res == "" {
t.Error("expected a filled string but got nil")
}
} }
func TestTwitchReturnsUrl(t *testing.T) { func TestTwitchReturnsUrl(t *testing.T) {
tc, err := input.NewTwitchClient() tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord) tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login() err = tc.Login()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails() user, err := tc.GetUserDetails()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user) posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
res, err := tc.ExtractUrl(posts[0]) res, err := tc.ExtractUrl(posts[0])
if err != nil { t.Error(err) } if err != nil {
if res == "" { t.Error("expected a filled string but got nil")} t.Error(err)
}
if res == "" {
t.Error("expected a filled string but got nil")
}
} }
func TestTwitchGetContent(t *testing.T) { func TestTwitchGetContent(t *testing.T) {
tc, err := input.NewTwitchClient() tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord) tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login() err = tc.Login()
if err != nil { t.Error(err) } if err != nil {
t.Error(err)
}
posts, err := tc.GetContent() posts, err := tc.GetContent()
if err != nil {t.Error(err) } if err != nil {
if len(posts) == 0 { t.Error("posts came back with 0 posts") } t.Error(err)
if len(posts) != 20 { t.Error("expected 20 posts") } }
if len(posts) == 0 {
t.Error("posts came back with 0 posts")
}
if len(posts) != 20 {
t.Error("expected 20 posts")
}
} }

View File

@ -44,13 +44,6 @@ func NewYoutubeClient(Record database.Source) YoutubeClient {
record: Record, record: Record,
cacheGroup: "youtube", cacheGroup: "youtube",
} }
/*
cc := NewConfigClient()
debug, err := strconv.ParseBool(cc.GetConfig(YOUTUBE_DEBUG))
if err != nil { panic("'YOUTUBE_DEBUG' was not a bool value")}
yc.Config.Debug = debug
*/
return yc return yc
} }
@ -109,14 +102,18 @@ func (yc *YoutubeClient) GetContent() ([]database.Article, error) {
return items, nil return items, nil
} }
func (yc *YoutubeClient) GetBrowser() *rod.Browser { func (yc *YoutubeClient) GetBrowser() (*rod.Browser, error) {
//browser := rod.New().MustConnect() //browser := rod.New().MustConnect()
var browser *rod.Browser var browser *rod.Browser
if path, exists := launcher.LookPath(); exists { if path, exists := launcher.LookPath(); exists {
u := launcher.New().Bin(path).MustLaunch() u, err := launcher.New().Bin(path).Launch()
if err != nil {
return browser, err
}
browser = rod.New().ControlURL(u).MustConnect() browser = rod.New().ControlURL(u).MustConnect()
} }
return browser return browser, nil
} }
func (yc *YoutubeClient) GetPage(parser *rod.Browser, url string) *rod.Page { func (yc *YoutubeClient) GetPage(parser *rod.Browser, url string) *rod.Page {
@ -163,7 +160,11 @@ func (yc *YoutubeClient) GetAvatarUri() (string, error) {
var AvatarUri string var AvatarUri string
//browser := rod.New().MustConnect() //browser := rod.New().MustConnect()
browser := yc.GetBrowser() browser, err := yc.GetBrowser()
if err != nil {
return "", err
}
page := browser.MustPage(yc.record.Url) page := browser.MustPage(yc.record.Url)
res := page.MustElement("#channel-header-container > yt-img-shadow:nth-child(1) > img:nth-child(1)").MustAttribute("src") res := page.MustElement("#channel-header-container > yt-img-shadow:nth-child(1) > img:nth-child(1)").MustAttribute("src")

View File

@ -9,45 +9,61 @@ import (
) )
var YouTubeRecord database.Source = database.Source{ var YouTubeRecord database.Source = database.Source{
ID: uuid.New(), ID: uuid.New(),
Name: "dadjokes", Name: "dadjokes",
Source: "reddit", Source: "reddit",
Site: "reddit", Site: "reddit",
Url: "https://youtube.com/gamegrumps", Url: "https://youtube.com/gamegrumps",
} }
func TestGetPageParser(t *testing.T) { func TestGetPageParser(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
_, err := yc.GetParser(YouTubeRecord.Url) _, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestGetChannelId(t *testing.T) { func TestGetChannelId(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url) parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
_, err = yc.GetChannelId(parser) _, err = yc.GetChannelId(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestPullFeed(t *testing.T) { func TestPullFeed(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url) parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
_, err = yc.GetChannelId(parser) _, err = yc.GetChannelId(parser)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
_, err = yc.PullFeed() _, err = yc.PullFeed()
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestGetAvatarUri(t *testing.T) { func TestGetAvatarUri(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
res, err := yc.GetAvatarUri() res, err := yc.GetAvatarUri()
if err != nil { panic(err) } if err != nil {
if res == "" { panic(input.ErrMissingAuthorImage)} t.Error(err)
}
if res == "" {
t.Error(input.ErrMissingAuthorImage)
}
} }
func TestGetVideoTags(t *testing.T) { func TestGetVideoTags(t *testing.T) {
@ -56,39 +72,59 @@ func TestGetVideoTags(t *testing.T) {
var videoUri = "https://www.youtube.com/watch?v=k_sQEXOBe68" var videoUri = "https://www.youtube.com/watch?v=k_sQEXOBe68"
parser, err := yc.GetParser(videoUri) parser, err := yc.GetParser(videoUri)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
tags, err := yc.GetTags(parser) tags, err := yc.GetTags(parser)
if err == nil && tags == "" { panic("err was empty but value was missing.")} if err == nil && tags == "" {
if err != nil { panic(err) } t.Error("err was empty but value was missing.")
}
if err != nil {
t.Error(err)
}
} }
func TestGetChannelTags(t *testing.T) { func TestGetChannelTags(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url) parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
tags, err := yc.GetTags(parser) tags, err := yc.GetTags(parser)
if err == nil && tags == "" { panic("no err but expected value was missing.")} if err == nil && tags == "" {
if err != nil { panic(err) } t.Error("no err but expected value was missing.")
}
if err != nil {
t.Error(err)
}
} }
func TestGetVideoThumbnail(t *testing.T) { func TestGetVideoThumbnail(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser("https://www.youtube.com/watch?v=k_sQEXOBe68") parser, err := yc.GetParser("https://www.youtube.com/watch?v=k_sQEXOBe68")
if err != nil {panic(err) } if err != nil {
t.Error(err)
}
thumb, err := yc.GetVideoThumbnail(parser) thumb, err := yc.GetVideoThumbnail(parser)
if err == nil && thumb == "" { panic("no err but expected result was missing")} if err == nil && thumb == "" {
if err != nil { panic(err) } t.Error("no err but expected result was missing")
}
if err != nil {
t.Error(err)
}
} }
func TestCheckSource(t *testing.T) { func TestCheckSource(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord) yc := input.NewYoutubeClient(YouTubeRecord)
_, err := yc.GetContent() _, err := yc.GetContent()
if err != nil { panic(err) } if err != nil {
t.Error(err)
}
} }
func TestCheckUriCache(t *testing.T) { func TestCheckUriCache(t *testing.T) {
@ -97,7 +133,9 @@ func TestCheckUriCache(t *testing.T) {
input.YoutubeUriCache = append(input.YoutubeUriCache, &item) input.YoutubeUriCache = append(input.YoutubeUriCache, &item)
res := yc.CheckUriCache(&item) res := yc.CheckUriCache(&item)
if res == false { panic("expected a value to come back")} if res == false {
t.Error("expected a value to come back")
}
} }
func TestCheckUriCacheFails(t *testing.T) { func TestCheckUriCacheFails(t *testing.T) {
@ -105,6 +143,8 @@ func TestCheckUriCacheFails(t *testing.T) {
item := "demo1" item := "demo1"
res := yc.CheckUriCache(&item) res := yc.CheckUriCache(&item)
if res == true { panic("expected no value to come back")} if res == true {
t.Error("expected no value to come back")
}
} }

View File

@ -14,14 +14,14 @@ import (
var ( var (
article database.Article = database.Article{ article database.Article = database.Article{
ID: uuid.New(), ID: uuid.New(),
Sourceid: uuid.New(), Sourceid: uuid.New(),
Tags: "unit, testing", Tags: "unit, testing",
Title: "Demo", Title: "Demo",
Url: "https://github.com/jtom38/newsbot.collector.api", Url: "https://github.com/jtom38/newsbot.collector.api",
//Pubdate: time.Now(), //Pubdate: time.Now(),
Videoheight: 0, Videoheight: 0,
Videowidth: 0, Videowidth: 0,
Description: "Hello World", Description: "Hello World",
} }
blank string = "" blank string = ""
@ -131,7 +131,7 @@ func TestDiscordMessagePost(t *testing.T) {
t.Error(err) t.Error(err)
} }
err = d.SendPayload(msg, endpoints[0]) err = d.SendPayload(msg, endpoints[0])
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }