fetch url moved to utils package

This commit is contained in:
Chris Kerr 2024-04-27 23:05:54 -04:00
parent 35f7816137
commit abc61125ea
2 changed files with 34 additions and 27 deletions

30
main.go
View File

@ -2,7 +2,7 @@ package main
import ( import (
"NukaNewsBot/commands" "NukaNewsBot/commands"
"context" "NukaNewsBot/utils"
"encoding/json" "encoding/json"
"fmt" "fmt"
"log" "log"
@ -12,7 +12,6 @@ import (
"github.com/PuerkitoBio/goquery" "github.com/PuerkitoBio/goquery"
"github.com/bwmarrin/discordgo" "github.com/bwmarrin/discordgo"
"github.com/chromedp/chromedp"
) )
type Config struct { type Config struct {
@ -101,7 +100,7 @@ func main() {
log.Println("Opened Discord connection") log.Println("Opened Discord connection")
// Run the scraping and message sending function at start up // Run the scraping and message sending function at start up
//sendNotifications(dg, fetchUrl(url), channelMap, roleMap, date) sendNotifications(dg, utils.FetchUrl(url), channelMap, roleMap, date)
// Schedule the scraping and message sending function to run once a day // Schedule the scraping and message sending function to run once a day
ticker := time.NewTicker(24 * time.Hour) ticker := time.NewTicker(24 * time.Hour)
@ -112,7 +111,7 @@ func main() {
for { for {
select { select {
case <-ticker.C: case <-ticker.C:
sendNotifications(dg, fetchUrl(url), channelMap, roleMap, date) sendNotifications(dg, utils.FetchUrl(url), channelMap, roleMap, date)
} }
} }
}() }()
@ -122,29 +121,6 @@ func main() {
<-make(chan struct{}) <-make(chan struct{})
} }
// fetchUrl loads the given URL in a headless browser via chromedp and
// returns the fully rendered outer HTML of the page. Rendering through a
// real browser means JavaScript-generated content is included, unlike a
// plain HTTP GET.
//
// NOTE(review): log.Fatal on error terminates the entire process; a
// transient navigation failure cannot be recovered by the caller.
func fetchUrl(url string) string {
// Create a new chromedp browser context; cancel releases the browser.
ctx := context.Background()
ctx, cancel := chromedp.NewContext(
ctx,
chromedp.WithLogf(log.Printf),
)
defer cancel()
// Navigate to the page and capture the rendered <html> element.
var html string
err := chromedp.Run(ctx, chromedp.Tasks{
chromedp.Navigate(url),
chromedp.OuterHTML("html", &html),
})
if err != nil {
// Fatal: logs the error and exits the process (os.Exit(1)).
log.Fatal(err)
}
// Return the HTML content
return html
}
// Add a function to extract relevant tags from the HTML content // Add a function to extract relevant tags from the HTML content
func extractNewsArticles(html string) []map[string]string { func extractNewsArticles(html string) []map[string]string {
var articles []map[string]string var articles []map[string]string

31
utils/fetch_url.go Normal file
View File

@ -0,0 +1,31 @@
package utils
import (
"context"
"log"
"github.com/chromedp/chromedp"
)
// FetchUrl loads the given URL in a headless browser via chromedp and
// returns the fully rendered outer HTML of the page. Rendering through a
// real browser means JavaScript-generated content is included, unlike a
// plain HTTP GET.
//
// NOTE(review): log.Fatal on error terminates the whole process — in a
// shared utils package callers may prefer a (string, error) return;
// confirm this exit-on-failure behavior is intended. There is also no
// timeout on the context, so a hung page load blocks indefinitely —
// TODO confirm.
func FetchUrl(url string) string {
// Create a new chromedp browser context; cancel releases the browser.
ctx := context.Background()
ctx, cancel := chromedp.NewContext(
ctx,
chromedp.WithLogf(log.Printf),
)
defer cancel()
// Navigate to the page and capture the rendered <html> element.
var html string
err := chromedp.Run(ctx, chromedp.Tasks{
chromedp.Navigate(url),
chromedp.OuterHTML("html", &html),
})
if err != nil {
// Fatal: logs the error and exits the process (os.Exit(1)).
log.Fatal(err)
}
// Return the HTML content
return html
}