Features/subscription/features for portal (#34)

* added a route to delete a subscription based on the given ID

* added a new route to find a record based on the name and source

* added a route to query Discord Web Hooks by Server and Channel names (the new routes are sketched below, after this list)

* tested the endpoints; they look good so far and will get more testing

* updated some routes for subscriptions and formatted files

* removed debug file

* fixed some panic calls

* regenerated the swagger docs with swag
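
A minimal, self-contained sketch of the kinds of routes these bullets describe, assuming a chi v5 router; the handler bodies are stubs and most paths are illustrative (only /subscriptions/discord/webhook/delete shows up in the swagger definitions later in this diff), so treat it as a sketch rather than the commit's actual code.

```go
package main

import (
	"net/http"

	"github.com/go-chi/chi/v5"
)

func main() {
	r := chi.NewRouter()

	// delete a subscription based on the ID given
	r.Delete("/subscriptions/discord/webhook/delete", func(w http.ResponseWriter, req *http.Request) {
		if req.URL.Query().Get("id") == "" {
			http.Error(w, "id is missing", http.StatusBadRequest)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	// find a record based on its name and source (illustrative path)
	r.Get("/sources/by/nameAndSource", func(w http.ResponseWriter, req *http.Request) {
		if req.URL.Query().Get("name") == "" || req.URL.Query().Get("source") == "" {
			http.Error(w, "name and source are required", http.StatusBadRequest)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	// query Discord Web Hooks by server and channel names (illustrative path)
	r.Get("/discord/webhooks/by/serverAndChannel", func(w http.ResponseWriter, req *http.Request) {
		if req.URL.Query().Get("server") == "" || req.URL.Query().Get("channel") == "" {
			http.Error(w, "server and channel are required", http.StatusBadRequest)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	http.ListenAndServe(":8081", r)
}
```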
James Tombleson 2022-12-04 08:49:17 -08:00 committed by GitHub
parent c161658487
commit ff4075383a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 478 additions and 291 deletions

.gitignore
View File

@ -1,5 +1,6 @@
.env
dev.session.sql
__debug_bin
.vscode

View File

@ -70,7 +70,7 @@ const docTemplate = `{
"responses": {}
}
},
"/articles/{id}": {
"/articles/{ID}": {
"get": {
"produces": [
"application/json"
@ -569,7 +569,6 @@ const docTemplate = `{
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -582,7 +581,6 @@ const docTemplate = `{
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -604,7 +602,6 @@ const docTemplate = `{
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -644,9 +641,6 @@ const docTemplate = `{
"/subscriptions/new/discordwebhook": {
"post": {
"tags": [
"Config",
"Source",
"Discord",
"Subscription"
],
"summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.",

View File

@ -61,7 +61,7 @@
"responses": {}
}
},
"/articles/{id}": {
"/articles/{ID}": {
"get": {
"produces": [
"application/json"
@ -560,7 +560,6 @@
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -573,7 +572,6 @@
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -595,7 +593,6 @@
"application/json"
],
"tags": [
"Config",
"Subscription"
],
"summary": "Returns the top 100 entries from the queue to be processed.",
@ -635,9 +632,6 @@
"/subscriptions/new/discordwebhook": {
"post": {
"tags": [
"Config",
"Source",
"Discord",
"Subscription"
],
"summary": "Creates a new subscription to link a post from a Source to a DiscordWebHook.",

View File

@ -12,7 +12,7 @@ paths:
summary: Lists the top 50 records
tags:
- Articles
/articles/{id}:
/articles/{ID}:
get:
parameters:
- description: uuid
@ -391,7 +391,6 @@ paths:
responses: {}
summary: Returns the top 100 entries from the queue to be processed.
tags:
- Config
- Subscription
/subscriptions/byDiscordId:
get:
@ -406,7 +405,6 @@ paths:
responses: {}
summary: Returns the top 100 entries from the queue to be processed.
tags:
- Config
- Subscription
/subscriptions/bySourceId:
get:
@ -421,7 +419,6 @@ paths:
responses: {}
summary: Returns the top 100 entries from the queue to be processed.
tags:
- Config
- Subscription
/subscriptions/discord/webhook/delete:
delete:
@ -454,8 +451,5 @@ paths:
responses: {}
summary: Creates a new subscription to link a post from a Source to a DiscordWebHook.
tags:
- Config
- Source
- Discord
- Subscription
swagger: "2.0"

View File

@ -20,4 +20,3 @@ type Sources interface {
ExtractTags(page *rod.Page) (string, error)
ExtractTitle(page *rod.Page) (string, error)
}

View File

@ -10,7 +10,7 @@ type CacheItem struct {
// Group defines what it should be a reference to.
// youtube, reddit, ect
Group string
Expires time.Time
Group string
Expires time.Time
IsTainted bool
}
}

View File

@ -6,85 +6,85 @@ import (
// Articles represents the model for an Article
type Articles struct {
ID uint `json:"ID"`
SourceID uint `json:"sourceId"`
Tags string `json:"tags"`
Title string `json:"title"`
Url string `json:"url"`
PubDate time.Time `json:"pubdate"`
Video string `json:"video"`
VideoHeight uint16 `json:"videoHeight"`
VideoWidth uint16 `json:"videoWidth"`
Thumbnail string `json:"thumbnail"`
Description string `json:"description"`
AuthorName string `json:"authorName"`
AuthorImage string `json:"authorImage"`
ID uint `json:"ID"`
SourceID uint `json:"sourceId"`
Tags string `json:"tags"`
Title string `json:"title"`
Url string `json:"url"`
PubDate time.Time `json:"pubdate"`
Video string `json:"video"`
VideoHeight uint16 `json:"videoHeight"`
VideoWidth uint16 `json:"videoWidth"`
Thumbnail string `json:"thumbnail"`
Description string `json:"description"`
AuthorName string `json:"authorName"`
AuthorImage string `json:"authorImage"`
}
type DiscordQueue struct {
ID uint `json:"ID"`
ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"`
ArticleId string `json:"articleId"`
ArticleId string `json:"articleId"`
}
type DiscordWebHooks struct {
ID uint `json:"ID"`
ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"`
Name string `json:"name"`
Key string `json:"key"`
Url string `json:"url"`
Server string `json:"server"`
Channel string `json:"channel"`
Enabled bool `json:"enabled"`
Name string `json:"name"`
Key string `json:"key"`
Url string `json:"url"`
Server string `json:"server"`
Channel string `json:"channel"`
Enabled bool `json:"enabled"`
}
type Icons struct {
ID uint `json:"ID"`
ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"`
FileName string `json:"fileName"`
Site string `json:"site"`
FileName string `json:"fileName"`
Site string `json:"site"`
}
type Settings struct {
ID uint `json:"ID"`
ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"`
Key string `json:"key"`
Value string `json:"value"`
Options string `json:"options"`
Key string `json:"key"`
Value string `json:"value"`
Options string `json:"options"`
}
type Sources struct {
ID uint `json:"ID"`
Site string `json:"site"`
Name string `json:"name"`
Source string `json:"source"`
Type string `json:"type"`
Value string `json:"value"`
Enabled bool `json:"enabled"`
Url string `json:"url"`
Tags string `json:"tags"`
ID uint `json:"ID"`
Site string `json:"site"`
Name string `json:"name"`
Source string `json:"source"`
Type string `json:"type"`
Value string `json:"value"`
Enabled bool `json:"enabled"`
Url string `json:"url"`
Tags string `json:"tags"`
}
type SourceLinks struct {
ID uint `json:"ID"`
ID uint `json:"ID"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
DeletedAt time.Time `json:"DeletedAt"`
SourceID uint `json:"sourceId"`
SourceType string `json:"sourceType"`
SourceName string `json:"sourceName"`
DiscordID uint `json:"discordId"`
DiscordName string `json:"discordName"`
SourceID uint `json:"sourceId"`
SourceType string `json:"sourceType"`
SourceName string `json:"sourceName"`
DiscordID uint `json:"discordId"`
DiscordName string `json:"discordName"`
}

View File

@ -2,34 +2,34 @@ package model
// This is the root Json object. It does not contain data that we care about though.
type RedditJsonContent struct {
Kind string `json:"kind"`
Kind string `json:"kind"`
Data RedditJsonContentData `json:"data"`
}
type RedditJsonContentData struct {
After string `json:"after"`
Dist int `json:"dist"`
Modhash string `json:"modhash"`
After string `json:"after"`
Dist int `json:"dist"`
Modhash string `json:"modhash"`
Children []RedditJsonContentChildren `json:"children"`
}
type RedditJsonContentChildren struct {
Kind string `json:"kind"`
Kind string `json:"kind"`
Data RedditPost `json:"data"`
}
// RedditPost contains the information that was posted by a user.
type RedditPost struct {
Subreddit string `json:"subreddit"`
Title string `json:"title"`
Content string `json:"selftext"`
ContentHtml string `json:"selftext_html"`
Author string `json:"author"`
Permalink string `json:"permalink"`
IsVideo bool `json:"is_video"`
Media RedditPostMedia `json:"media"`
Url string `json:"url"`
UrlOverriddenByDest string `json:"url_overridden_by_dest"`
Subreddit string `json:"subreddit"`
Title string `json:"title"`
Content string `json:"selftext"`
ContentHtml string `json:"selftext_html"`
Author string `json:"author"`
Permalink string `json:"permalink"`
IsVideo bool `json:"is_video"`
Media RedditPostMedia `json:"media"`
Url string `json:"url"`
UrlOverriddenByDest string `json:"url_overridden_by_dest"`
Thumbnail string `json:"thumbnail"`
}
@ -41,14 +41,14 @@ type RedditPostMedia struct {
// RedditVideo contains information about the video in the post.
type RedditPostMediaRedditVideo struct {
BitrateKbps int `json:"bitrate_kpbs"`
FallBackUrl string `json:"fallback_url"`
Height int `json:"height"`
Width int `json:"width"`
ScrubberMediaUrl string `json:"scrubber_media_url"`
DashUrl string `json:"dash_url"`
Duration int `json:"duration"`
HlsUrl string `json:"hls_url"`
IsGif bool `json:"is_gif"`
BitrateKbps int `json:"bitrate_kpbs"`
FallBackUrl string `json:"fallback_url"`
Height int `json:"height"`
Width int `json:"width"`
ScrubberMediaUrl string `json:"scrubber_media_url"`
DashUrl string `json:"dash_url"`
Duration int `json:"duration"`
HlsUrl string `json:"hls_url"`
IsGif bool `json:"is_gif"`
TranscodingStatus string `json:"transcoding_status"`
}
}

View File

@ -30,7 +30,7 @@ func main() {
fmt.Printf("Swagger: http://%v:8081/swagger/index.html\r\n", address)
err := http.ListenAndServe(":8081", server.Router)
if err != nil {
if err != nil {
panic(err)
}
}
}

View File

@ -35,7 +35,7 @@ func (s *Server) listArticles(w http.ResponseWriter, r *http.Request) {
// @Param id path string true "uuid"
// @Produce application/json
// @Tags Articles
// @Router /articles/{id} [get]
// @Router /articles/{ID} [get]
func (s *Server) getArticleById(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")

View File

@ -12,7 +12,7 @@ import (
// @Router /discord/queue [get]
func (s *Server) GetDiscordQueue(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
res, err := s.Db.ListDiscordQueueItems(*s.ctx, 100)
if err != nil {
w.Write([]byte(err.Error()))
@ -26,4 +26,4 @@ func (s *Server) GetDiscordQueue(w http.ResponseWriter, r *http.Request) {
}
w.Write(bres)
}
}

View File

@ -44,8 +44,7 @@ func (s *Server) GetDiscordWebHooks(w http.ResponseWriter, r *http.Request) {
func (s *Server) GetDiscordWebHooksById(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
query := r.URL.Query()
_id := query["id"][0]
_id := chi.URLParam(r, "ID")
if _id == "" {
http.Error(w, "id is missing", http.StatusBadRequest)
return
@ -56,7 +55,7 @@ func (s *Server) GetDiscordWebHooksById(w http.ResponseWriter, r *http.Request)
http.Error(w, "unable to parse id value", http.StatusBadRequest)
return
}
res, err := s.Db.GetDiscordWebHooksByID(*s.ctx, uuid)
if err != nil {
http.Error(w, "no record found", http.StatusBadRequest)
@ -135,7 +134,7 @@ func (s *Server) NewDiscordWebHook(w http.ResponseWriter, r *http.Request) {
http.Error(w, "invalid url", http.StatusBadRequest)
return
}
if _server == ""{
if _server == "" {
http.Error(w, "server is missing", http.StatusBadRequest)
}
if _channel == "" {
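
Earlier in this file's diff, `query["id"][0]` is replaced with `chi.URLParam(r, "ID")`: indexing a missing query parameter panics, while the URL-param lookup just returns an empty string that can be validated, which lines up with the "fixed some panic calls" bullet. A minimal standalone sketch of that pattern, assuming chi v5 and google/uuid (the route path here is illustrative, not the commit's exact route):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-chi/chi/v5"
	"github.com/google/uuid"
)

func main() {
	r := chi.NewRouter()
	// Illustrative path; the real route in the commit may differ.
	r.Get("/discord/webhooks/{ID}", func(w http.ResponseWriter, req *http.Request) {
		id := chi.URLParam(req, "ID") // "" when the parameter is absent, no panic
		if id == "" {
			http.Error(w, "id is missing", http.StatusBadRequest)
			return
		}
		parsed, err := uuid.Parse(id)
		if err != nil {
			http.Error(w, "unable to parse id value", http.StatusBadRequest)
			return
		}
		fmt.Fprintf(w, "would look up webhook %v\n", parsed)
	})
	http.ListenAndServe(":8081", r)
}
```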

View File

@ -1,2 +1 @@
package routes_test

View File

@ -41,4 +41,4 @@ func (s *Server) getSettings(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Write(bResult)
}
}

View File

@ -12,7 +12,7 @@ import (
// GetSubscriptions
// @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json
// @Tags Config, Subscription
// @Tags Subscription
// @Router /subscriptions [get]
func (s *Server) ListSubscriptions(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
@ -36,7 +36,7 @@ func (s *Server) ListSubscriptions(w http.ResponseWriter, r *http.Request) {
// @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json
// @Param id query string true "id"
// @Tags Config, Subscription
// @Tags Subscription
// @Router /subscriptions/byDiscordId [get]
func (s *Server) GetSubscriptionsByDiscordId(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
@ -73,7 +73,7 @@ func (s *Server) GetSubscriptionsByDiscordId(w http.ResponseWriter, r *http.Requ
// @Summary Returns the top 100 entries from the queue to be processed.
// @Produce application/json
// @Param id query string true "id"
// @Tags Config, Subscription
// @Tags Subscription
// @Router /subscriptions/bySourceId [get]
func (s *Server) GetSubscriptionsBySourceId(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
@ -110,7 +110,7 @@ func (s *Server) GetSubscriptionsBySourceId(w http.ResponseWriter, r *http.Reque
// @Summary Creates a new subscription to link a post from a Source to a DiscordWebHook.
// @Param discordWebHookId query string true "discordWebHookId"
// @Param sourceId query string true "sourceId"
// @Tags Config, Source, Discord, Subscription
// @Tags Subscription
// @Router /subscriptions/new/discordwebhook [post]
func (s *Server) newDiscordWebHookSubscription(w http.ResponseWriter, r *http.Request) {
// Extract the values given

View File

@ -6,24 +6,24 @@ import (
"github.com/jtom38/newsbot/collector/domain/model"
)
type CacheClient struct{
group string
type CacheClient struct {
group string
DefaultTimer time.Duration
}
func NewCacheClient(group string) CacheClient {
return CacheClient{
group: group,
group: group,
DefaultTimer: time.Hour,
}
}
func (cc *CacheClient) Insert(key string, value string) {
item := model.CacheItem{
Key: key,
Value: value,
Group: cc.group,
Expires: time.Now().Add(1 * time.Hour),
Key: key,
Value: value,
Group: cc.group,
Expires: time.Now().Add(1 * time.Hour),
IsTainted: false,
}
cacheStorage = append(cacheStorage, &item)
@ -31,8 +31,12 @@ func (cc *CacheClient) Insert(key string, value string) {
func (cc *CacheClient) FindByKey(key string) (*model.CacheItem, error) {
for _, item := range cacheStorage {
if item.Group != cc.group { continue }
if item.Key != key { continue }
if item.Group != cc.group {
continue
}
if item.Key != key {
continue
}
// if it was tainted, renew the timer and remove the taint as this record was still needed
if item.IsTainted {
@ -47,8 +51,12 @@ func (cc *CacheClient) FindByKey(key string) (*model.CacheItem, error) {
func (cc *CacheClient) FindByValue(value string) (*model.CacheItem, error) {
for _, item := range cacheStorage {
if item.Group != cc.group { continue }
if item.Value != value { continue }
if item.Group != cc.group {
continue
}
if item.Value != value {
continue
}
// if it was tainted, renew the timer and remove the taint as this record was still needed
if item.IsTainted {
@ -59,4 +67,3 @@ func (cc *CacheClient) FindByValue(value string) (*model.CacheItem, error) {
}
return &model.CacheItem{}, ErrCacheRecordMissing
}

View File

@ -18,14 +18,18 @@ func TestInsert(t *testing.T) {
func TestFindGroupMissing(t *testing.T) {
cache := cache.NewCacheClient("faker")
_, err := cache.FindByKey("UnitTesting")
if err == nil { panic("Nothing was appended with the requested group.") }
if err == nil {
panic("Nothing was appended with the requested group.")
}
}
func TestFindGroupExists(t *testing.T) {
cache := cache.NewCacheClient("Testing")
cache.Insert("UnitTesting", "Something")
_, err := cache.FindByKey("UnitTesting")
if err != nil { panic("") }
if err != nil {
panic("")
}
}
func TestCacheStorage(t *testing.T) {
@ -35,6 +39,7 @@ func TestCacheStorage(t *testing.T) {
cache := cache.NewCacheClient("Testing")
_, err := cache.FindByKey("UnitTesting02")
if err != nil { panic("expected to find the value")}
if err != nil {
panic("expected to find the value")
}
}

View File

@ -10,4 +10,4 @@ var (
cacheStorage []*model.CacheItem
ErrCacheRecordMissing = errors.New("unable to find the requested record")
)
)

View File

@ -9,7 +9,7 @@ import (
// When a record becomes tainted, it needs to be renewed or it will be dropped from the cache.
// If a record is tainted and used again, the taint will be removed and a new Expires value will be set.
// If its not renewed, it will be dropped.
type CacheAgeMonitor struct {}
type CacheAgeMonitor struct{}
func NewCacheAgeMonitor() CacheAgeMonitor {
return CacheAgeMonitor{}
@ -20,10 +20,10 @@ func (cam CacheAgeMonitor) CheckExpiredEntries() {
now := time.Now()
for index, item := range cacheStorage {
if now.After(item.Expires) {
// the timer expired, and its not tainted, taint it
if !item.IsTainted {
item.IsTainted = true
item.IsTainted = true
item.Expires = now.Add(1 * time.Hour)
}
@ -39,7 +39,9 @@ func (cam CacheAgeMonitor) CheckExpiredEntries() {
func (cam CacheAgeMonitor) removeEntry(index int) []*model.CacheItem {
var temp []*model.CacheItem
for i, item := range cacheStorage {
if i != index { temp = append(temp, item )}
if i != index {
temp = append(temp, item)
}
}
return temp
}
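
The comments at the top of this file describe the cache aging lifecycle: an expired entry is first tainted and given another hour; if it is read again the taint is cleared, otherwise a later sweep drops it. A small self-contained sketch of that lifecycle, independent of the repo's types (names here are illustrative only):

```go
package main

import (
	"fmt"
	"time"
)

// item mirrors the idea of a cached record that can be tainted before removal.
type item struct {
	Key       string
	Expires   time.Time
	IsTainted bool
}

// sweep applies the two-phase expiry: taint on the first expired sweep,
// drop on a later sweep if the taint was never cleared by a read.
func sweep(items []*item, now time.Time) []*item {
	var kept []*item
	for _, it := range items {
		if now.After(it.Expires) {
			if !it.IsTainted {
				// first expiry: taint it and give it one more hour
				it.IsTainted = true
				it.Expires = now.Add(1 * time.Hour)
				kept = append(kept, it)
			}
			// already tainted and still expired: drop it
			continue
		}
		kept = append(kept, it)
	}
	return kept
}

func main() {
	now := time.Now()
	cache := []*item{{Key: "demo", Expires: now.Add(-time.Minute)}}

	cache = sweep(cache, now)                   // tainted and kept, expiry pushed out an hour
	fmt.Println(len(cache), cache[0].IsTainted) // 1 true

	cache = sweep(cache, now.Add(2*time.Hour)) // still tainted, so it is dropped
	fmt.Println(len(cache))                    // 0
}
```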

View File

@ -9,5 +9,5 @@ import (
func TestCacheTaintItem(t *testing.T) {
cc := cache.NewCacheClient("Testing")
cc.Insert("UnitTesting01", "test")
}
}

View File

@ -49,6 +49,17 @@ func (cc *ConfigClient) GetConfig(key string) string {
return res
}
// Looks for a value in the env and will panic if it does not exist.
func (c ConfigClient) MustGetString(key string) string {
res, filled := os.LookupEnv(key)
if !filled {
msg := fmt.Sprintf("No value was found for '%v'", key)
panic(msg)
}
return res
}
func (cc *ConfigClient) GetFeature(flag string) (bool, error) {
cc.RefreshEnv()
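
The new MustGetString helper added above fails loudly when a required environment variable is absent instead of silently returning an empty string. A standalone copy of that behavior plus a usage sketch (the variable name is made up for illustration):

```go
package main

import (
	"fmt"
	"os"
)

// Standalone copy of the MustGetString behavior shown in the hunk above.
func mustGetString(key string) string {
	res, filled := os.LookupEnv(key)
	if !filled {
		panic(fmt.Sprintf("No value was found for '%v'", key))
	}
	return res
}

func main() {
	os.Setenv("EXAMPLE_WEBHOOK_URL", "https://discord.com/api/webhooks/example")
	fmt.Println(mustGetString("EXAMPLE_WEBHOOK_URL"))

	// Uncommenting the next line would panic, since the key was never set.
	// fmt.Println(mustGetString("EXAMPLE_MISSING_KEY"))
}
```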

View File

@ -1,8 +1,8 @@
package config_test
import (
"testing"
"os"
"testing"
"github.com/jtom38/newsbot/collector/services/config"
)
@ -15,6 +15,8 @@ func TestGetConfigExpectNull(t *testing.T) {
cc := config.New()
os.Setenv(config.REDDIT_PULL_HOT, "")
res := cc.GetConfig(config.REDDIT_PULL_HOT)
if res != "" { panic("expected blank")}
if res != "" {
panic("expected blank")
}
}
}

View File

@ -9,18 +9,20 @@ import (
)
var FFXIVRecord database.Source = database.Source{
ID: uuid.New(),
Site: "ffxiv",
Name: "Final Fantasy XIV - NA",
ID: uuid.New(),
Site: "ffxiv",
Name: "Final Fantasy XIV - NA",
Source: "ffxiv",
Url: "https://na.finalfantasyxiv.com/lodestone/",
Tags: "ffxiv, final, fantasy, xiv, na, lodestone",
Url: "https://na.finalfantasyxiv.com/lodestone/",
Tags: "ffxiv, final, fantasy, xiv, na, lodestone",
}
func TestFfxivGetParser(t *testing.T) {
fc := ffxiv.NewFFXIVClient(FFXIVRecord)
_, err := fc.GetParser()
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestFfxivPullFeed(t *testing.T) {
@ -30,8 +32,12 @@ func TestFfxivPullFeed(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if len(links) == 0 { panic("expected links to come back but got 0") }
if err != nil {
t.Error(err)
}
if len(links) == 0 {
t.Error("expected links to come back but got 0")
}
}
@ -42,14 +48,20 @@ func TestFfxivExtractThumbnail(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
thumb, err := fc.ExtractThumbnail(page)
if err != nil { panic(err) }
if thumb == "" { panic("expected a link but got nothing.")}
if err != nil {
t.Error(err)
}
if thumb == "" {
t.Error("expected a link but got nothing.")
}
}
func TestFfxivExtractPubDate(t *testing.T) {
@ -59,13 +71,17 @@ func TestFfxivExtractPubDate(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
_, err = fc.ExtractPubDate(page)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestFfxivExtractDescription(t *testing.T) {
@ -75,13 +91,17 @@ func TestFfxivExtractDescription(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
_, err = fc.ExtractDescription(page)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestFfxivExtractAuthor(t *testing.T) {
@ -91,14 +111,20 @@ func TestFfxivExtractAuthor(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
author, err := fc.ExtractAuthor(page)
if err != nil { panic(err) }
if author == "" { panic("failed to locate the author name") }
if err != nil {
t.Error(err)
}
if author == "" {
t.Error("failed to locate the author name")
}
}
func TestFfxivExtractTags(t *testing.T) {
@ -108,14 +134,20 @@ func TestFfxivExtractTags(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
res, err := fc.ExtractTags(page)
if err != nil { panic(err) }
if res == "" {panic("failed to locate the tags")}
if err != nil {
t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
}
func TestFfxivExtractTitle(t *testing.T) {
@ -125,14 +157,20 @@ func TestFfxivExtractTitle(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
res, err := fc.ExtractTitle(page)
if err != nil { panic(err) }
if res == "" { panic("failed to locate the tags") }
if err != nil {
t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
}
func TestFFxivExtractAuthorIamge(t *testing.T) {
@ -142,18 +180,24 @@ func TestFFxivExtractAuthorIamge(t *testing.T) {
defer parser.Close()
links, err := fc.PullFeed(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
page := fc.GetPage(parser, links[0])
defer page.Close()
res, err := fc.ExtractAuthorImage(page)
if err != nil { panic(err) }
if res == "" { panic("failed to locate the tags") }
if err != nil {
t.Error(err)
}
if res == "" {
t.Error("failed to locate the tags")
}
}
func TestFfxivCheckSource(t *testing.T) {
fc := ffxiv.NewFFXIVClient(FFXIVRecord)
fc.CheckSource()
}
}

View File

@ -19,20 +19,26 @@ func getHttpContent(uri string) ([]byte, error) {
client := &http.Client{
Transport: tr,
}
req, err := http.NewRequest("GET", uri, nil)
if err != nil { return nil, err }
if err != nil {
return nil, err
}
// set the user agent header to avoid kick backs.. as much
req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.10; rv:75.0) Gecko/20100101 Firefox/75.0")
log.Printf("Requesting content from %v\n", uri)
resp, err := client.Do(req)
if err != nil { log.Fatalln(err) }
if err != nil {
log.Fatalln(err)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil { return nil, err }
if err != nil {
return nil, err
}
return body, nil
}
}

View File

@ -47,7 +47,6 @@ func NewRedditClient(Record database.Source) *RedditClient {
// os.Setenv("GODEBUG", "http2client=0")
//}
func (rc *RedditClient) GetBrowser() *rod.Browser {
var browser *rod.Browser
if path, exists := launcher.LookPath(); exists {
@ -62,7 +61,7 @@ func (rc *RedditClient) GetPage(parser *rod.Browser, url string) *rod.Page {
return page
}
//func (rc RedditClient)
//func (rc RedditClient)
// GetContent() reaches out to Reddit and pulls the Json data.
// It will then convert the data to a struct and return the struct.
@ -74,8 +73,6 @@ func (rc *RedditClient) GetContent() (model.RedditJsonContent, error) {
log.Printf("[Reddit] Collecting results on '%v'", rc.record.Name)
content, err := getHttpContent(Url)
if err != nil {
return items, err
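
Per the comments above, GetContent pulls the subreddit's JSON listing and converts it into the RedditJsonContent structs from the model hunk earlier in this diff. A minimal sketch of that conversion, with a canned body standing in for the real HTTP call; the trimmed-down struct names are local to the example:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down structs mirroring the RedditJsonContent shape from the model
// hunk earlier in this diff; only a few fields are kept for the example.
type redditJsonContent struct {
	Kind string                `json:"kind"`
	Data redditJsonContentData `json:"data"`
}

type redditJsonContentData struct {
	Children []redditJsonContentChildren `json:"children"`
}

type redditJsonContentChildren struct {
	Data redditPost `json:"data"`
}

type redditPost struct {
	Subreddit string `json:"subreddit"`
	Title     string `json:"title"`
}

func main() {
	// A canned body stands in for what getHttpContent would return.
	raw := []byte(`{"kind":"Listing","data":{"children":[{"kind":"t3","data":{"subreddit":"dadjokes","title":"Hello"}}]}}`)

	var items redditJsonContent
	if err := json.Unmarshal(raw, &items); err != nil {
		panic(err)
	}
	fmt.Println(items.Data.Children[0].Data.Title) // Hello
}
```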

View File

@ -9,12 +9,12 @@ import (
)
var RedditRecord database.Source = database.Source{
ID: uuid.New(),
Name: "dadjokes",
ID: uuid.New(),
Name: "dadjokes",
Source: "reddit",
Site: "reddit",
Url: "https://reddit.com/r/dadjokes",
Tags: "reddit, dadjokes",
Site: "reddit",
Url: "https://reddit.com/r/dadjokes",
Tags: "reddit, dadjokes",
}
func TestGetContent(t *testing.T) {
@ -30,4 +30,4 @@ func TestGetContent(t *testing.T) {
t.Error("Title is missing")
}
}
}
}

View File

@ -22,7 +22,7 @@ func NewRssClient(sourceRecord model.Sources) rssClient {
}
//func (rc rssClient) ReplaceSourceRecord(source model.Sources) {
//rc.SourceRecord = source
//rc.SourceRecord = source
//}
func (rc rssClient) getCacheGroup() string {
@ -31,7 +31,9 @@ func (rc rssClient) getCacheGroup() string {
func (rc rssClient) GetContent() error {
feed, err := rc.PullFeed()
if err != nil { return err }
if err != nil {
return err
}
cacheClient := cache.NewCacheClient(rc.getCacheGroup())
@ -49,7 +51,9 @@ func (rc rssClient) PullFeed() (*gofeed.Feed, error) {
feedUri := fmt.Sprintf("%v", rc.SourceRecord.Url)
fp := gofeed.NewParser()
feed, err := fp.ParseURL(feedUri)
if err != nil { return nil, err }
if err != nil {
return nil, err
}
return feed, nil
}
}

View File

@ -7,10 +7,10 @@ import (
"github.com/jtom38/newsbot/collector/services/input"
)
var rssRecord = model.Sources {
ID: 1,
var rssRecord = model.Sources{
ID: 1,
Name: "ArsTechnica",
Url: "https://feeds.arstechnica.com/arstechnica/index",
Url: "https://feeds.arstechnica.com/arstechnica/index",
}
func TestRssClientConstructor(t *testing.T) {
@ -20,7 +20,11 @@ func TestRssClientConstructor(t *testing.T) {
func TestRssGetFeed(t *testing.T) {
client := input.NewRssClient(rssRecord)
feed, err := client.PullFeed()
if err != nil { t.Error(err) }
if len(feed.Items) >= 0 { t.Error("failed to collect items from the fees")}
if err != nil {
t.Error(err)
}
if len(feed.Items) >= 0 {
t.Error("failed to collect items from the fees")
}
}
}

View File

@ -104,31 +104,47 @@ func (tc *TwitchClient) GetContent() ([]database.Article, error) {
var article database.Article
AuthorName, err := tc.ExtractAuthor(video)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Authorname = sql.NullString{String: AuthorName}
Authorimage, err := tc.ExtractAuthorImage(user)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Authorimage = sql.NullString{String: Authorimage}
article.Description, err = tc.ExtractDescription(video)
if err != nil {return items, err }
if err != nil {
return items, err
}
article.Pubdate, err = tc.ExtractPubDate(video)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Sourceid = tc.SourceRecord.ID
article.Tags, err = tc.ExtractTags(video, user)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Thumbnail, err = tc.ExtractThumbnail(video)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Title, err = tc.ExtractTitle(video)
if err != nil { return items, err }
if err != nil {
return items, err
}
article.Url, err = tc.ExtractUrl(video)
if err != nil { return items, err }
if err != nil {
return items, err
}
items = append(items, article)
}
@ -210,8 +226,12 @@ func (tc *TwitchClient) ExtractDescription(post helix.Video) (string, error) {
// Extracts the avatar of the author with some validation.
func (tc *TwitchClient) ExtractAuthorImage(user helix.User) (string, error) {
if user.ProfileImageURL == "" { return "", ErrMissingAuthorImage }
if !strings.Contains(user.ProfileImageURL, "-profile_image-") { return "", ErrInvalidAuthorImage }
if user.ProfileImageURL == "" {
return "", ErrMissingAuthorImage
}
if !strings.Contains(user.ProfileImageURL, "-profile_image-") {
return "", ErrInvalidAuthorImage
}
return user.ProfileImageURL, nil
}
@ -231,6 +251,8 @@ func (tc *TwitchClient) ExtractTitle(post helix.Video) (string, error) {
}
func (tc *TwitchClient) ExtractUrl(post helix.Video) (string, error) {
if post.URL == "" { return "", ErrMissingUrl }
if post.URL == "" {
return "", ErrMissingUrl
}
return post.URL, nil
}
}

View File

@ -9,13 +9,13 @@ import (
"github.com/jtom38/newsbot/collector/services/input"
)
var TwitchSourceRecord = database.Source {
var TwitchSourceRecord = database.Source{
ID: uuid.New(),
Name: "nintendo",
Source: "Twitch",
}
var TwitchInvalidRecord = database.Source {
var TwitchInvalidRecord = database.Source{
ID: uuid.New(),
Name: "EvilNintendo",
Source: "Twitch",
@ -115,40 +115,62 @@ func TestTwitchReturnsVideoAuthor(t *testing.T) {
func TestTwitchReturnsThumbnail(t *testing.T) {
tc, err := input.NewTwitchClient()
if err != nil {t.Error(err) }
if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
value, err := tc.ExtractThumbnail(posts[0])
if err != nil { t.Error(err) }
if value == "" { t.Error("uable to parse username") }
if err != nil {
t.Error(err)
}
if value == "" {
t.Error("uable to parse username")
}
}
func TestTwitchReturnsPubDate(t *testing.T) {
tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
date, err := tc.ExtractPubDate(posts[0])
log.Println(date)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
}
func TestTwitchReturnsDescription(t *testing.T) {
@ -181,17 +203,25 @@ func TestTwitchReturnsDescription(t *testing.T) {
func TestTwitchReturnsAuthorImage(t *testing.T) {
tc, err := input.NewTwitchClient()
if err != nil {t.Error(err) }
if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails()
if err != nil {t.Error(err) }
if err != nil {
t.Error(err)
}
_, err = tc.ExtractAuthorImage(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
}
func TestTwitchReturnsTags(t *testing.T) {
@ -212,10 +242,14 @@ func TestTwitchReturnsTags(t *testing.T) {
}
posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
_, err = tc.ExtractTags(posts[0], user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
}
func TestTwitchReturnsTitle(t *testing.T) {
@ -236,42 +270,70 @@ func TestTwitchReturnsTitle(t *testing.T) {
}
posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
res, err := tc.ExtractTitle(posts[0])
if err != nil { t.Error(err) }
if res == "" { t.Error("expected a filled string but got nil")}
if err != nil {
t.Error(err)
}
if res == "" {
t.Error("expected a filled string but got nil")
}
}
func TestTwitchReturnsUrl(t *testing.T) {
tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
user, err := tc.GetUserDetails()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
posts, err := tc.GetPosts(user)
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
res, err := tc.ExtractUrl(posts[0])
if err != nil { t.Error(err) }
if res == "" { t.Error("expected a filled string but got nil")}
if err != nil {
t.Error(err)
}
if res == "" {
t.Error("expected a filled string but got nil")
}
}
func TestTwitchGetContent(t *testing.T) {
tc, err := input.NewTwitchClient()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
tc.ReplaceSourceRecord(TwitchSourceRecord)
err = tc.Login()
if err != nil { t.Error(err) }
if err != nil {
t.Error(err)
}
posts, err := tc.GetContent()
if err != nil {t.Error(err) }
if len(posts) == 0 { t.Error("posts came back with 0 posts") }
if len(posts) != 20 { t.Error("expected 20 posts") }
}
if err != nil {
t.Error(err)
}
if len(posts) == 0 {
t.Error("posts came back with 0 posts")
}
if len(posts) != 20 {
t.Error("expected 20 posts")
}
}

View File

@ -44,13 +44,6 @@ func NewYoutubeClient(Record database.Source) YoutubeClient {
record: Record,
cacheGroup: "youtube",
}
/*
cc := NewConfigClient()
debug, err := strconv.ParseBool(cc.GetConfig(YOUTUBE_DEBUG))
if err != nil { panic("'YOUTUBE_DEBUG' was not a bool value")}
yc.Config.Debug = debug
*/
return yc
}
@ -109,14 +102,18 @@ func (yc *YoutubeClient) GetContent() ([]database.Article, error) {
return items, nil
}
func (yc *YoutubeClient) GetBrowser() *rod.Browser {
func (yc *YoutubeClient) GetBrowser() (*rod.Browser, error) {
//browser := rod.New().MustConnect()
var browser *rod.Browser
if path, exists := launcher.LookPath(); exists {
u := launcher.New().Bin(path).MustLaunch()
u, err := launcher.New().Bin(path).Launch()
if err != nil {
return browser, err
}
browser = rod.New().ControlURL(u).MustConnect()
}
return browser
return browser, nil
}
func (yc *YoutubeClient) GetPage(parser *rod.Browser, url string) *rod.Page {
@ -163,7 +160,11 @@ func (yc *YoutubeClient) GetAvatarUri() (string, error) {
var AvatarUri string
//browser := rod.New().MustConnect()
browser := yc.GetBrowser()
browser, err := yc.GetBrowser()
if err != nil {
return "", err
}
page := browser.MustPage(yc.record.Url)
res := page.MustElement("#channel-header-container > yt-img-shadow:nth-child(1) > img:nth-child(1)").MustAttribute("src")

View File

@ -9,45 +9,61 @@ import (
)
var YouTubeRecord database.Source = database.Source{
ID: uuid.New(),
Name: "dadjokes",
ID: uuid.New(),
Name: "dadjokes",
Source: "reddit",
Site: "reddit",
Url: "https://youtube.com/gamegrumps",
Site: "reddit",
Url: "https://youtube.com/gamegrumps",
}
func TestGetPageParser(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
_, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestGetChannelId(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
_, err = yc.GetChannelId(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestPullFeed(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
_, err = yc.GetChannelId(parser)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
_, err = yc.PullFeed()
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestGetAvatarUri(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
res, err := yc.GetAvatarUri()
if err != nil { panic(err) }
if res == "" { panic(input.ErrMissingAuthorImage)}
if err != nil {
t.Error(err)
}
if res == "" {
t.Error(input.ErrMissingAuthorImage)
}
}
func TestGetVideoTags(t *testing.T) {
@ -56,39 +72,59 @@ func TestGetVideoTags(t *testing.T) {
var videoUri = "https://www.youtube.com/watch?v=k_sQEXOBe68"
parser, err := yc.GetParser(videoUri)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
tags, err := yc.GetTags(parser)
if err == nil && tags == "" { panic("err was empty but value was missing.")}
if err != nil { panic(err) }
if err == nil && tags == "" {
t.Error("err was empty but value was missing.")
}
if err != nil {
t.Error(err)
}
}
func TestGetChannelTags(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser(YouTubeRecord.Url)
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
tags, err := yc.GetTags(parser)
if err == nil && tags == "" { panic("no err but expected value was missing.")}
if err != nil { panic(err) }
if err == nil && tags == "" {
t.Error("no err but expected value was missing.")
}
if err != nil {
t.Error(err)
}
}
func TestGetVideoThumbnail(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
parser, err := yc.GetParser("https://www.youtube.com/watch?v=k_sQEXOBe68")
if err != nil {panic(err) }
if err != nil {
t.Error(err)
}
thumb, err := yc.GetVideoThumbnail(parser)
if err == nil && thumb == "" { panic("no err but expected result was missing")}
if err != nil { panic(err) }
if err == nil && thumb == "" {
t.Error("no err but expected result was missing")
}
if err != nil {
t.Error(err)
}
}
func TestCheckSource(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
_, err := yc.GetContent()
if err != nil { panic(err) }
if err != nil {
t.Error(err)
}
}
func TestCheckUriCache(t *testing.T) {
@ -97,14 +133,18 @@ func TestCheckUriCache(t *testing.T) {
input.YoutubeUriCache = append(input.YoutubeUriCache, &item)
res := yc.CheckUriCache(&item)
if res == false { panic("expected a value to come back")}
if res == false {
t.Error("expected a value to come back")
}
}
func TestCheckUriCacheFails(t *testing.T) {
yc := input.NewYoutubeClient(YouTubeRecord)
item := "demo1"
res := yc.CheckUriCache(&item)
if res == true { panic("expected no value to come back")}
}
res := yc.CheckUriCache(&item)
if res == true {
t.Error("expected no value to come back")
}
}

View File

@ -14,14 +14,14 @@ import (
var (
article database.Article = database.Article{
ID: uuid.New(),
ID: uuid.New(),
Sourceid: uuid.New(),
Tags: "unit, testing",
Title: "Demo",
Url: "https://github.com/jtom38/newsbot.collector.api",
Tags: "unit, testing",
Title: "Demo",
Url: "https://github.com/jtom38/newsbot.collector.api",
//Pubdate: time.Now(),
Videoheight: 0,
Videowidth: 0,
Videowidth: 0,
Description: "Hello World",
}
blank string = ""
@ -33,7 +33,7 @@ func TestDiscordMessageContainsTitle(t *testing.T) {
if err != nil {
t.Error(err)
}
for _, i := range *msg.Embeds {
if i.Title == &blank {
t.Error("title missing")
@ -47,7 +47,7 @@ func TestDiscordMessageContainsDescription(t *testing.T) {
if err != nil {
t.Error(err)
}
for _, i := range *msg.Embeds {
if i.Description == &blank {
t.Error("description missing")
@ -68,7 +68,7 @@ func TestDiscordMessageFooter(t *testing.T) {
}
if i.Footer.IconUrl == &blank {
t.Error("missing footer url")
}
}
}
}
@ -81,7 +81,7 @@ func TestDiscordMessageFields(t *testing.T) {
}
for _, embed := range *msg.Embeds {
for _, field := range embed.Fields {
var fName string
var fName string
if field.Name != nil {
fName = *field.Name
} else {
@ -110,7 +110,7 @@ func TestDiscordMessagePost(t *testing.T) {
if err != nil {
t.Error(err)
}
err = godotenv.Load()
if err != nil {
t.Error(err)
@ -131,8 +131,8 @@ func TestDiscordMessagePost(t *testing.T) {
t.Error(err)
}
err = d.SendPayload(msg, endpoints[0])
err = d.SendPayload(msg, endpoints[0])
if err != nil {
t.Error(err)
}
}
}

View File

@ -3,4 +3,4 @@ package output
type Output interface {
GeneratePayload() error
SendPayload() error
}
}