Rewrite of parser and server

Changed database layout and using Go path for source now
This commit is contained in:
Andreas Mieke 2015-09-26 00:33:55 +02:00
commit f6259a8160
68 changed files with 18863 additions and 0 deletions

24
.gitignore vendored Normal file
View file

@ -0,0 +1,24 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof

44
config/config.go Normal file
View file

@ -0,0 +1,44 @@
package config
import (
"encoding/json"
"io/ioutil"
)
type Config struct {
DatabaseConnection string
EpisodeRegex string
YoutubeKey string
GronkhUrl string
ImageDirectory string
ImageWebDirectory string
UseSocket bool
BindAddress string
BindSocket string
PiwikURL string
PiwikID int
PiwikToken string
SiteUrl string
AssetsDirectory string
TemplatesDirectory string
}
var C Config
func LoadConfig(path string) (error) {
file, e := ioutil.ReadFile(path)
if e != nil {
return e
}
e = json.Unmarshal(file, &C)
return e
}
func WriteConfig(path string) (error) {
jason, err := json.Marshal(C)
if err != nil {
return err
}
err = ioutil.WriteFile(path, jason, 0755)
return err
}

272
database/database.go Normal file
View file

@ -0,0 +1,272 @@
package database
import (
"database/sql"
"log"
"time"
"github.com/jinzhu/gorm"
_ "github.com/lib/pq"
)
// Author database model: one content creator. Empty strings in the
// nullable columns are converted to SQL NULL by BeforeSave.
type Author struct {
	ID        uint `gorm:"primary_key"`
	CreatedAt time.Time
	UpdatedAt time.Time
	DeletedAt *time.Time
	Slug      sql.NullString `sql:"not null;unique_index"` // URL identifier, unique
	Name      sql.NullString `sql:"not null"`
	Youtube   sql.NullString `sql:"not null"` // YouTube channel id
	AvatarS   sql.NullString // small/big resized avatar paths
	AvatarB   sql.NullString
	FanArtS   sql.NullString // small/big resized fan-art paths
	FanArtB   sql.NullString
	// Has-many relations resolved via the *ID foreign keys below.
	LetsPlays []LetsPlay
	LetsTests []LetsTest
	Episodes  []Episode
}

// Let's Play database model: a series of episodes by one author.
type LetsPlay struct {
	ID        uint `gorm:"primary_key"`
	CreatedAt time.Time
	UpdatedAt time.Time
	DeletedAt *time.Time
	AuthorID  uint           `sql:"index"`
	Slug      sql.NullString `sql:"not null;unique_index"`
	Name      sql.NullString `sql:"not null"`
	PosterS   sql.NullString // small/big resized poster paths
	PosterB   sql.NullString
	// Earliest episode air date; pulled back by the episode parsers.
	Aired    time.Time `sql:"default:null"`
	Episodes []Episode
}

// Let's Test database model: a single stand-alone review video.
type LetsTest struct {
	ID        uint `gorm:"primary_key"`
	CreatedAt time.Time
	UpdatedAt time.Time
	DeletedAt *time.Time
	AuthorID  uint           `sql:"index"`
	Slug      sql.NullString `sql:"not null;unique_index"`
	Name      sql.NullString `sql:"not null"`
	PosterS   sql.NullString
	PosterB   sql.NullString
	ThumbS    sql.NullString
	ThumbB    sql.NullString
	Youtube   sql.NullString `sql:"not null"`   // YouTube video id
	Descr     sql.NullString `sql:"type:TEXT"`
	Aired     time.Time      `sql:"not null"`
	Rating    sql.NullFloat64 // NULL when there are no votes (see BeforeSave)
	Votes     sql.NullInt64
	Duration  sql.NullInt64 // seconds; NULL when unknown
}

// Episode database model: one video belonging to a Let's Play.
type Episode struct {
	ID         uint `gorm:"primary_key"`
	CreatedAt  time.Time
	UpdatedAt  time.Time
	DeletedAt  *time.Time
	AuthorID   uint           `sql:"index"`
	LetsPlayID uint           `sql:"index"`
	Slug       sql.NullString `sql:"not null;unique_index"`
	Name       sql.NullString `sql:"not null"`
	Episode    sql.NullInt64  `sql:"not null"` // number parsed from the title; 0 when unknown
	ThumbS     sql.NullString
	ThumbB     sql.NullString
	Youtube    sql.NullString `sql:"not null"`
	Descr      sql.NullString `sql:"type:TEXT"`
	Aired      time.Time      `sql:"not null"`
	Rating     sql.NullFloat64
	Votes      sql.NullInt64
	Duration   sql.NullInt64
}
// Db is the package-global gorm handle used by all parser/server code.
var Db gorm.DB

// InitDb opens the "postgres" gorm connection, disables query logging
// and auto-migrates all four models.
// NOTE(review): log.Fatalf terminates the process, so the `return err`
// inside the branch is effectively unreachable; the error return only
// matters if Fatalf is ever softened.
func InitDb(connection string) (error) {
	var err error
	Db, err = gorm.Open("postgres", connection)
	if err != nil {
		log.Fatalf("FAT Database error: %+v", err)
		return err
	}
	Db.LogMode(false)
	Db.AutoMigrate(&Author{}, &LetsPlay{}, &LetsTest{}, &Episode{})
	return err
}
// Empty string cleanup
//
// BeforeSave is a gorm callback run before an Author row is written.
// It marks each nullable string column as SQL NULL when the Go-side
// string is empty (Valid == false serializes as NULL).
func (a *Author) BeforeSave() (err error) {
	// The original if/else ladder collapses to one boolean assignment
	// per field; behavior is identical.
	a.Slug.Valid = a.Slug.String != ""
	a.Name.Valid = a.Name.String != ""
	a.Youtube.Valid = a.Youtube.String != ""
	a.AvatarS.Valid = a.AvatarS.String != ""
	a.AvatarB.Valid = a.AvatarB.String != ""
	a.FanArtS.Valid = a.FanArtS.String != ""
	a.FanArtB.Valid = a.FanArtB.String != ""
	return
}
// BeforeSave is a gorm callback run before a LetsPlay row is written.
// It marks each nullable string column as SQL NULL when empty.
func (l *LetsPlay) BeforeSave() (err error) {
	// Direct boolean assignments replace the original if/else ladder.
	l.Slug.Valid = l.Slug.String != ""
	l.Name.Valid = l.Name.String != ""
	l.PosterS.Valid = l.PosterS.String != ""
	l.PosterB.Valid = l.PosterB.String != ""
	return
}
// BeforeSave is a gorm callback run before a LetsTest row is written.
// Empty strings become SQL NULL; the numeric columns get their
// validity derived from their values.
func (l *LetsTest) BeforeSave() (err error) {
	// Direct boolean assignments replace the original if/else ladders.
	l.Slug.Valid = l.Slug.String != ""
	l.Name.Valid = l.Name.String != ""
	l.PosterS.Valid = l.PosterS.String != ""
	l.PosterB.Valid = l.PosterB.String != ""
	l.ThumbS.Valid = l.ThumbS.String != ""
	l.ThumbB.Valid = l.ThumbB.String != ""
	l.Youtube.Valid = l.Youtube.String != ""
	l.Descr.Valid = l.Descr.String != ""
	// The vote count is always stored, even when zero.
	l.Votes.Valid = true
	// A rating without any votes is meaningless -> NULL.
	l.Rating.Valid = l.Votes.Int64 != 0
	// A zero duration means "unknown" -> NULL.
	l.Duration.Valid = l.Duration.Int64 != 0
	return
}
// BeforeSave is a gorm callback run before an Episode row is written.
// Empty strings become SQL NULL; numeric validity is derived from the
// values.
func (e *Episode) BeforeSave() (err error) {
	// Direct boolean assignments replace the original if/else ladders.
	e.Slug.Valid = e.Slug.String != ""
	e.Name.Valid = e.Name.String != ""
	e.ThumbS.Valid = e.ThumbS.String != ""
	e.ThumbB.Valid = e.ThumbB.String != ""
	e.Youtube.Valid = e.Youtube.String != ""
	e.Descr.Valid = e.Descr.String != ""
	// Vote count and episode number are always stored, even when zero.
	e.Votes.Valid = true
	e.Episode.Valid = true
	// A rating without any votes is meaningless -> NULL.
	e.Rating.Valid = e.Votes.Int64 != 0
	// A zero duration means "unknown" -> NULL.
	e.Duration.Valid = e.Duration.Int64 != 0
	return
}

131
gparser/authorparser.go Normal file
View file

@ -0,0 +1,131 @@
package main
import (
"log"
"net/url"
"path"
"github.com/PuerkitoBio/goquery"
"github.com/cheggaaa/pb"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
"git.1750studios.com/gronkhDE/gogronkh/image"
"git.1750studios.com/gronkhDE/gogronkh/youtube"
)
// Parse new authors
//
// ParseAuthors scans the "/lets-player" overview page and spawns one
// goroutine per author link; wg is the package-wide WaitGroup.
func ParseAuthors() {
	progress := pb.StartNew(0)
	resp, err := GetHTTPResource(config.C.GronkhUrl + "/lets-player")
	if err != nil {
		log.Printf("ERR AT Page: Request failed (%+v)", err)
		return
	}
	document, err := goquery.NewDocumentFromResponse(resp)
	if err != nil {
		log.Printf("ERR AT Page: Document failure (%+v)", err)
		return
	}
	document.Find("div.column > a").Each(func(idx int, sel *goquery.Selection) {
		wg.Add(1)
		go ParseAuthor(idx, sel, progress)
	})
	wg.Wait()
	progress.FinishPrint("Parsed authors")
}
// ParseAuthor scrapes one author's detail page, resolves the YouTube
// channel, resizes avatar/fan art and inserts the row. Authors whose
// slug already exists are skipped.
func ParseAuthor(i int, s *goquery.Selection, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	u, _ := s.Attr("href")
	ur, _ := url.Parse(u)
	slug := path.Base(ur.Path)
	var count int
	// Skip authors that are already in the database.
	if database.Db.Model(database.Author{}).Where("slug = ?", slug).Count(&count); count > 0 {
		return
	}
	var AT database.Author
	AT.Slug.String = slug
	AT.Name.String, _ = s.Attr("title")
	res, err := GetHTTPResource(config.C.GronkhUrl + u)
	if err != nil {
		log.Printf("ERR AT %s: Request failed (%+v)", slug, err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR AT %s: Document failure (%+v)", slug, err)
		return
	}
	// Honeyball's channel is not discoverable from the page: hard-coded.
	if AT.Name.String == "Honeyball" {
		AT.Youtube.String = "UCwF-rMpz0gbgWZIbrgGXuGQ"
	} else {
		ytna, _ := doc.Find("iframe#fr").Attr("src")
		ytn, _ := url.Parse(ytna)
		ytname := ytn.Query().Get("p")
		AT.Youtube.String, _ = youtube.GetChannelId(ytname, config.C.YoutubeKey)
	}
	// BUGFIX: the error and an empty item list were previously ignored,
	// which made the Items[0] accesses below panic on API failures.
	ytres, err := youtube.GetChannel([]string{"snippet", "brandingSettings"}, AT.Youtube.String, config.C.YoutubeKey)
	if err != nil || len(ytres.Items) == 0 {
		log.Printf("ERR AT %s: Channel request failed (%+v)", slug, err)
		return
	}
	AT.AvatarS.String, AT.AvatarB.String, err = image.ResizeAvatar(ytres.Items[0].Snippet.Thumbnails["high"].Url)
	if err != nil {
		log.Printf("WAR AT %s: Error resizing avatar: %+v", slug, err)
	}
	// Prefer the high-resolution TV banner, fall back to the normal one.
	if fa, ok := ytres.Items[0].BrandingSettings.Image["bannerTvHighImageUrl"]; ok {
		AT.FanArtS.String, AT.FanArtB.String, err = image.ResizeFanArt(fa)
	} else if fa, ok := ytres.Items[0].BrandingSettings.Image["bannerTvImageUrl"]; ok {
		AT.FanArtS.String, AT.FanArtB.String, err = image.ResizeFanArt(fa)
	}
	if err != nil {
		log.Printf("WAR AT %s: Error resizing fanart: %+v", slug, err)
	}
	if err := database.Db.Create(&AT).Error; err != nil {
		// BUGFIX: "databse" typo in the log message.
		log.Printf("ERR AT %s: Could not be added to database (%+v)", slug, err)
	} else {
		log.Printf("SUC AT %s: Added to database", slug)
	}
}
// Update authors
//
// UpdateAuthors refreshes every stored author concurrently, one
// goroutine per row, and waits for all of them.
func UpdateAuthors() {
	var authors []database.Author
	var total int
	database.Db.Find(&authors).Count(&total)
	progress := pb.StartNew(total)
	for _, author := range authors {
		wg.Add(1)
		go UpdateAuthor(author, progress)
	}
	wg.Wait()
	progress.FinishPrint("Updated authors")
}
// UpdateAuthor re-fetches one author's avatar and fan art from the
// YouTube API and saves the row.
func UpdateAuthor(AT database.Author, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	ytres, err := youtube.GetChannel([]string{"snippet", "brandingSettings"}, AT.Youtube.String, config.C.YoutubeKey)
	// BUGFIX: err was assigned but never checked, and an empty Items
	// slice made the Items[0] accesses below panic on API failures.
	if err != nil || len(ytres.Items) == 0 {
		log.Printf("ERR AT %s: Channel request failed (%+v)", AT.Slug.String, err)
		return
	}
	AT.AvatarS.String, AT.AvatarB.String, err = image.ResizeAvatar(ytres.Items[0].Snippet.Thumbnails["high"].Url)
	if err != nil {
		log.Printf("WAR AT %s: Error resizing avatar: %+v", AT.Slug.String, err)
	}
	// Prefer the high-resolution TV banner, fall back to the normal one.
	if fa, ok := ytres.Items[0].BrandingSettings.Image["bannerTvHighImageUrl"]; ok {
		AT.FanArtS.String, AT.FanArtB.String, err = image.ResizeFanArt(fa)
	} else if fa, ok := ytres.Items[0].BrandingSettings.Image["bannerTvImageUrl"]; ok {
		AT.FanArtS.String, AT.FanArtB.String, err = image.ResizeFanArt(fa)
	}
	if err != nil {
		log.Printf("WAR AT %s: Error resizing fanart: %+v", AT.Slug.String, err)
	}
	if err := database.Db.Save(&AT).Error; err != nil {
		log.Printf("ERR AT %s: Could not be updated in database (%+v)", AT.Slug.String, err)
	} else {
		log.Printf("SUC AT %s: Updated in database", AT.Slug.String)
	}
}

219
gparser/episodeparser.go Normal file
View file

@ -0,0 +1,219 @@
package main
import (
"log"
"net/url"
"strconv"
"strings"
"time"
"path"
"github.com/ChannelMeter/iso8601duration"
"github.com/PuerkitoBio/goquery"
"github.com/cheggaaa/pb"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
"git.1750studios.com/gronkhDE/gogronkh/image"
"git.1750studios.com/gronkhDE/gogronkh/youtube"
)
// Parse new episodes
//
// ParseEpisodes starts one goroutine per stored Let's Play, each
// walking that series' first episode-listing page.
func ParseEpisodes() {
	progress := pb.StartNew(0)
	var letsPlays []database.LetsPlay
	database.Db.Find(&letsPlays)
	for _, lp := range letsPlays {
		wg.Add(1)
		go ParseLPEpisodes(lp.Slug.String, 1, progress)
	}
	wg.Wait()
	progress.FinishPrint("Parsed episodes")
}
// ParseLPEpisodes walks one Let's Play's paginated episode listing:
// it spawns a goroutine per listed episode and recurses (as a new
// goroutine) onto the next page while a ".next" link exists.
func ParseLPEpisodes(gslug string, page int, bar *pb.ProgressBar) {
	defer wg.Done()
	res, err := GetHTTPResource(config.C.GronkhUrl + "/lets-play/" + gslug + "/page/" + strconv.Itoa(page))
	if err != nil {
		log.Printf("ERR LP %s: Request failed (%+v)", gslug, err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR LP %s: Document failure (%+v)", gslug, err)
		return
	}
	// Parse the Episodes
	doc.Find(".entry.entry-letsplay").Each(func(i int, s *goquery.Selection) {
		wg.Add(1)
		go ParseEpisode(i, s, bar)
	})
	// Follow pagination: a ".next" link means more pages exist.
	_, found := doc.Find(".next").Attr("href")
	if found {
		wg.Add(1)
		go ParseLPEpisodes(gslug, page+1, bar)
	}
}
// ParseEpisode scrapes one episode page linked from a Let's-Play
// listing entry, enriches it with YouTube metadata (description,
// thumbnail, air date, duration, rating) and inserts the row.
// Episodes whose slug is already stored are skipped early.
func ParseEpisode(i int, s *goquery.Selection, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	gu, _ := s.Find("h2 > a").Attr("href")
	ur, _ := url.Parse(gu)
	slug := path.Base(ur.Path)
	var count int
	// Skip episodes that are already in the database.
	if database.Db.Model(database.Episode{}).Where("slug = ?", slug).Count(&count); count > 0 {
		return
	}
	var EP database.Episode
	EP.Slug.String = slug
	// The h1 link points at the parent Let's Play; resolve its row so
	// the episode can be attached via LetsPlayID.
	u, _ := s.Find("h1 > a").Attr("href")
	ur, _ = url.Parse(u)
	var LP database.LetsPlay
	database.Db.Where("slug = ?", path.Base(ur.Path)).First(&LP)
	EP.LetsPlayID = LP.ID
	res, err := GetHTTPResource(gu)
	if err != nil {
		log.Printf("ERR EP %s: Request failed (%+v)", slug, err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR EP %s: Document failure (%+v)", slug, err)
		return
	}
	EP.Name.String = doc.Find("div.article > h2").First().Text()
	if EP.Name.String == "" {
		log.Printf("ERR EP %s: Episode name empty", slug)
		return
	}
	// Extract the episode number from the title via the configured
	// regex; 0 when the title does not match.
	if match := episodeRegex.FindStringSubmatch(EP.Name.String); len(match) > 0 {
		num, err := strconv.Atoi(match[1])
		if err != nil {
			log.Printf("SUC EP %s: Integer conversion not possible", slug)
			EP.Episode.Int64 = 0
		}
		// NOTE(review): also executed after a failed Atoi; num is 0
		// then, so the stored value is the same either way.
		EP.Episode.Int64 = int64(num)
	} else {
		log.Printf("SUC EP %s: Name does not match RegEx", slug)
		EP.Episode.Int64 = 0
	}
	// Collect the on-page description paragraphs.
	doc.Find(".article > p").Each(func(i int, s *goquery.Selection) {
		EP.Descr.String += s.Text() + "\n"
	})
	EP.Descr.String = strings.Trim(EP.Descr.String, "\n ")
	au, _ := doc.Find(".author > a.avatar").Attr("href")
	au = strings.TrimSuffix(au, "/")
	aur, _ := url.Parse(au)
	if path.Base(aur.Path) == "" || path.Base(aur.Path) == "." {
		log.Printf("ERR EP %s: No author found", slug)
		return
	}
	if path.Base(aur.Path) == "tobinator612" { // Don't ask… just… don't ask
		EP.AuthorID = LP.AuthorID
	} else {
		var AT database.Author
		database.Db.Where("slug = ?", path.Base(aur.Path)).First(&AT)
		EP.AuthorID = AT.ID
	}
	// Derive the YouTube video id from the embedded player URL; both
	// the /embed/<id> and the watch?v=<id> forms occur.
	yt, _ := doc.Find(".youtube > iframe").Attr("src")
	ytpath, _ := url.Parse(yt)
	EP.Youtube.String = path.Base(ytpath.Path)
	if EP.Youtube.String == "watch" {
		EP.Youtube.String = ytpath.Query().Get("v")
		if idx := strings.LastIndex(EP.Youtube.String, "?"); idx != -1 {
			EP.Youtube.String = EP.Youtube.String[:idx]
		}
	}
	ytres, err := youtube.GetVideos([]string {"snippet", "statistics", "status", "contentDetails"}, []string {EP.Youtube.String}, config.C.YoutubeKey)
	if err != nil || len(ytres.Items) == 0 || ytres.Items[0].Status.UploadStatus != "processed" {
		log.Printf("WAR EP %s: Video %s is private (%+v)", slug, EP.Youtube.String, err)
		return
	}
	// Fall back to the YouTube description when the page had none.
	if EP.Descr.String == "" {
		EP.Descr.String = ytres.Items[0].Snippet.Description
	}
	// Prefer the maxres thumbnail, fall back to "high".
	if thumb, ok := ytres.Items[0].Snippet.Thumbnails["maxres"]; ok {
		EP.ThumbS.String, EP.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	} else if thumb, ok := ytres.Items[0].Snippet.Thumbnails["high"]; ok {
		EP.ThumbS.String, EP.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	}
	if err != nil {
		log.Printf("WAR EP %s: Error resizing thumbnail: %+v", slug, err)
	}
	EP.Aired, err = time.Parse(time.RFC3339Nano, ytres.Items[0].Snippet.PublishedAt)
	if err != nil {
		log.Printf("ERR EP %s: Could not parse aired", slug)
		return
	}
	dur, err := duration.FromString(ytres.Items[0].ContentDetails.Duration)
	if err == nil {
		EP.Duration.Int64 = int64(dur.ToDuration().Seconds())
	} else {
		log.Printf("SUC EP %s: Could not parse duration", slug)
		EP.Duration.Int64 = 0
	}
	EP.Rating.Float64, EP.Votes.Int64 = youtube.GetRatingAndVotesWithRes(ytres)
	// Keep the parent Let's Play's aired date at its earliest episode.
	if LP.Aired.IsZero() || LP.Aired.After(EP.Aired) {
		LP.Aired = EP.Aired
		database.Db.Save(&LP)
	}
	if err := database.Db.Create(&EP).Error; err != nil {
		log.Printf("ERR EP %s: Could not be added to databse (%+v)", slug, err)
	} else {
		log.Printf("SUC EP %s: Added to database", slug)
	}
}
// Update episodes
//
// UpdateEpisodes refreshes every stored episode concurrently, one
// goroutine per row, and waits for all of them.
func UpdateEpisodes() {
	var episodes []database.Episode
	var total int
	database.Db.Find(&episodes).Count(&total)
	progress := pb.StartNew(total)
	for _, ep := range episodes {
		wg.Add(1)
		go UpdateEpisode(ep, progress)
	}
	wg.Wait()
	progress.FinishPrint("Updated episodes")
}
// UpdateEpisode refreshes one episode's rating/vote counters from
// YouTube. Private videos are deleted; otherwise the parent Let's
// Play's aired date and a missing thumbnail are fixed up and the row
// is saved.
func UpdateEpisode(EP database.Episode, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	var status string
	var thumb youtube.Thumb
	EP.Rating.Float64, EP.Votes.Int64, status, thumb = youtube.GetRatingAndVotesWithId(EP.Youtube.String, config.C.YoutubeKey)
	if status != "private" {
		var LP database.LetsPlay
		database.Db.Where("ID = ?", EP.LetsPlayID).First(&LP)
		// Keep the Let's Play's aired date at its earliest episode.
		if LP.Aired.IsZero() || LP.Aired.After(EP.Aired) {
			LP.Aired = EP.Aired
			database.Db.Save(&LP)
		}
		// Only fetch and resize a thumbnail when none is stored yet.
		if EP.ThumbB.Valid == false {
			var err error
			EP.ThumbS.String, EP.ThumbB.String, err = image.ResizeThumb(thumb.Url)
			if err != nil {
				log.Printf("WAR EP %s: Error resizing thumbnail: %+v", EP.Slug.String, err)
			}
		}
		if err := database.Db.Save(&EP).Error; err != nil {
			// BUGFIX: "databse" typo in the log message.
			log.Printf("ERR EP %s: Could not be updated in database (%+v)", EP.Slug.String, err)
		} else {
			log.Printf("SUC EP %s: Updated in database", EP.Slug.String)
		}
	} else {
		database.Db.Delete(&EP)
		log.Printf("SUC EP %s: Removed from database", EP.Slug.String)
	}
}

168
gparser/feedparser.go Normal file
View file

@ -0,0 +1,168 @@
package main
import (
"encoding/xml"
"io/ioutil"
"log"
"net/url"
"path"
"strconv"
"strings"
"time"
"github.com/ChannelMeter/iso8601duration"
"github.com/PuerkitoBio/goquery"
"github.com/cheggaaa/pb"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
"git.1750studios.com/gronkhDE/gogronkh/image"
"git.1750studios.com/gronkhDE/gogronkh/youtube"
)
// RSSItem is one <item> of an author feed; only the episode link is
// used.
type RSSItem struct {
	Link string `xml:"link"`
}

// RSSFeed is the minimal feed envelope: the channel's item list.
type RSSFeed struct {
	Items []RSSItem `xml:"channel>item"`
}
// ParseRSSFeeds walks every author's RSS feed: episodes already in the
// database are refreshed, unknown ones are parsed from their pages.
func ParseRSSFeeds() {
	var authors []database.Author
	database.Db.Find(&authors)
	for _, AT := range authors {
		var Feed RSSFeed
		res, err := GetHTTPResource(config.C.GronkhUrl + "/zeige/" + AT.Slug.String + "/feed")
		if err != nil {
			// BUGFIX: AT.Slug is a sql.NullString struct; log its .String.
			log.Printf("ERR RS %s: Request failed (%+v)", AT.Slug.String, err)
			continue
		}
		body, err := ioutil.ReadAll(res.Body)
		// BUGFIX: close immediately instead of defer-in-loop, which kept
		// every response body open until the function returned.
		res.Body.Close()
		if err != nil {
			// BUGFIX: was log.Fatalf, which terminates the process and
			// made the continue below unreachable; a transient read
			// failure on one feed should not kill the daemon.
			log.Printf("ERR RS %s: Document failure (%+v)", AT.Slug.String, err)
			continue
		}
		if xerr := xml.Unmarshal(body, &Feed); xerr != nil {
			// BUGFIX: previously logged the stale HTTP err instead of xerr.
			log.Printf("ERR RS %s: XML failure (%+v)", AT.Slug.String, xerr)
			continue
		}
		bar := pb.StartNew(len(Feed.Items))
		for _, item := range Feed.Items {
			ur, _ := url.Parse(item.Link)
			slug := path.Base(ur.Path)
			var count int
			if database.Db.Model(database.Episode{}).Where("slug = ?", slug).Count(&count); count > 0 {
				// Known episode: refresh it.
				var EP database.Episode
				database.Db.Where("slug = ?", slug).First(&EP)
				wg.Add(1)
				go UpdateEpisode(EP, bar)
			} else {
				// New episode: parse it from its page.
				wg.Add(1)
				go ParseFeedEpisode(item.Link, bar)
			}
		}
		wg.Wait()
		bar.FinishPrint("Parsed RSS feed for " + AT.Slug.String)
	}
}
// ParseFeedEpisode scrapes one episode page discovered via an RSS feed
// and stores it, enriched with YouTube metadata.
// NOTE(review): this is a near-complete copy of ParseEpisode (only the
// Let's-Play resolution and log prefixes differ) — candidate for
// extraction into a shared helper.
func ParseFeedEpisode(u string, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	ur, _ := url.Parse(u)
	slug := path.Base(ur.Path)
	var EP database.Episode
	EP.Slug.String = slug
	// The parent Let's Play slug is the parent path segment of the URL.
	var LP database.LetsPlay
	database.Db.Where("slug = ?", path.Base(path.Dir(ur.Path))).First(&LP)
	EP.LetsPlayID = LP.ID
	res, err := GetHTTPResource(u)
	if err != nil {
		log.Printf("ERR RS %s: Request failed (%+v)", slug, err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR RS %s: Document failure (%+v)", slug, err)
		return
	}
	EP.Name.String = doc.Find("div.article > h2").First().Text()
	if EP.Name.String == "" {
		log.Printf("ERR RS %s: Episode name empty", slug)
		return
	}
	// Episode number from the title via the configured regex; 0 when
	// it does not match.
	if match := episodeRegex.FindStringSubmatch(EP.Name.String); len(match) > 0 {
		num, _ := strconv.Atoi(match[1])
		EP.Episode.Int64 = int64(num)
	} else {
		log.Printf("SUC RSS %s: Name does not match RegEx", slug)
		EP.Episode.Int64 = 0
	}
	// Collect the on-page description paragraphs.
	doc.Find(".article > p").Each(func(i int, s *goquery.Selection) {
		EP.Descr.String += s.Text() + "\n"
	})
	EP.Descr.String = strings.Trim(EP.Descr.String, "\n ")
	au, _ := doc.Find(".author > a.avatar").Attr("href")
	au = strings.TrimSuffix(au, "/")
	aur, _ := url.Parse(au)
	if path.Base(aur.Path) == "" || path.Base(aur.Path) == "." {
		log.Printf("ERR EP %s: No author found", slug)
		return
	}
	if path.Base(aur.Path) == "tobinator612" { // Don't ask… just… don't ask
		EP.AuthorID = LP.AuthorID
	} else {
		var AT database.Author
		database.Db.Where("slug = ?", path.Base(aur.Path)).First(&AT)
		EP.AuthorID = AT.ID
	}
	// Video id from the embedded player URL; handles both /embed/<id>
	// and watch?v=<id> forms.
	yt, _ := doc.Find(".youtube > iframe").Attr("src")
	ytpath, _ := url.Parse(yt)
	EP.Youtube.String = path.Base(ytpath.Path)
	if EP.Youtube.String == "watch" {
		EP.Youtube.String = ytpath.Query().Get("v")
		if idx := strings.LastIndex(EP.Youtube.String, "?"); idx != -1 {
			EP.Youtube.String = EP.Youtube.String[:idx]
		}
	}
	ytres, err := youtube.GetVideos([]string {"snippet", "statistics", "status", "contentDetails"}, []string {EP.Youtube.String}, config.C.YoutubeKey)
	if err != nil || len(ytres.Items) == 0 || ytres.Items[0].Status.UploadStatus != "processed" {
		log.Printf("WAR RS %s: Video %s is private (%s)", slug, EP.Youtube.String, err)
		return
	}
	// Fall back to the YouTube description when the page had none.
	if EP.Descr.String == "" {
		EP.Descr.String = ytres.Items[0].Snippet.Description
	}
	// Prefer the maxres thumbnail, fall back to "high".
	if thumb, ok := ytres.Items[0].Snippet.Thumbnails["maxres"]; ok {
		EP.ThumbS.String, EP.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	} else if thumb, ok := ytres.Items[0].Snippet.Thumbnails["high"]; ok {
		EP.ThumbS.String, EP.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	}
	if err != nil {
		log.Printf("WAR EP %s: Error resizing thumbnail: %+v", slug, err)
	}
	EP.Aired, err = time.Parse(time.RFC3339Nano, ytres.Items[0].Snippet.PublishedAt)
	if err != nil {
		log.Printf("ERR RS %s: Could not parse aired", slug)
		return
	}
	dur, err := duration.FromString(ytres.Items[0].ContentDetails.Duration)
	if err == nil {
		EP.Duration.Int64 = int64(dur.ToDuration().Seconds())
	} else {
		log.Printf("SUC RSS %s: Could not parse duration", slug)
		EP.Duration.Int64 = 0
	}
	EP.Rating.Float64, EP.Votes.Int64 = youtube.GetRatingAndVotesWithRes(ytres)
	if err := database.Db.Create(&EP).Error; err != nil {
		log.Printf("ERR RS %s: Could not be added to databse (%+v)", slug, err)
	} else {
		log.Printf("SUC RSS %s: Added to database", slug)
	}
}

45
gparser/http.go Normal file
View file

@ -0,0 +1,45 @@
package main
import (
"log"
"net/http"
"strconv"
"time"
)
var sem = make(chan byte, 5)
// Make an HTTP Get Request to u
func GetHTTPResource(u string) (*http.Response, error) {
sem <- 1
// Prepare HTTP Client, Cookie and Request
client := &http.Client {}
cookie := &http.Cookie {
Name : "POPUPCHECK",
Value : strconv.FormatInt(time.Now().UnixNano() / 1000000, 10),
Domain : "gronkh.de",
Path : "/",
}
req, err := http.NewRequest("GET", u, nil)
req.AddCookie(cookie)
req.Header.Add("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
req.Header.Add("Cache-Control", "max-age=0")
req.Header.Add("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
if err != nil {
<- sem
log.Fatalf("FAT HTTP - Failed to create new Request: %+v", err)
return nil, err
}
// Execute HTTP Request
res, err := client.Do(req)
if err != nil {
<- sem
return nil, err
}
if res.StatusCode == http.StatusNotFound {
<- sem
return nil, nil
}
<- sem
return res, nil
}

136
gparser/lpparser.go Normal file
View file

@ -0,0 +1,136 @@
package main
import (
"log"
"net/url"
"strings"
"path"
"github.com/PuerkitoBio/goquery"
"github.com/cheggaaa/pb"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
"git.1750studios.com/gronkhDE/gogronkh/image"
)
// Parse new Lets Plays
//
// ParseLetsPlays scans the Let's-Play overview page and spawns one
// goroutine per unknown series to scrape its detail page.
func ParseLetsPlays() {
	bar := pb.StartNew(0)
	res, err := GetHTTPResource(config.C.GronkhUrl + "/lets-play")
	if err != nil {
		log.Printf("ERR LP Page: Request failed (%+v)", err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR LP Page: Document failure (%+v)", err)
		return
	}
	doc.Find(".postpadding > a").Each(func(i int, s *goquery.Selection) {
		u, _ := s.Attr("href")
		ur, _ := url.Parse(u)
		slug := path.Base(ur.Path)
		var count int
		// Skip series that are already stored.
		if database.Db.Model(database.LetsPlay{}).Where("slug = ?", slug).Count(&count); count > 0 {
			return
		}
		// LP is declared per callback invocation, so passing &LP to the
		// goroutine below is safe (no shared loop variable).
		var LP database.LetsPlay
		LP.Slug.String = slug
		LP.Name.String, _ = s.Attr("title")
		wg.Add(1)
		go ParseLPPage(LP.Slug.String, &LP, bar)
	})
	wg.Wait()
	bar.FinishPrint("Parsed lets plays")
}
// ParseLPPage scrapes one Let's Play's detail page: poster image and
// author, then inserts the pre-filled row.
func ParseLPPage(gslug string, LP *database.LetsPlay, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	resp, err := GetHTTPResource(config.C.GronkhUrl + "/lets-play/" + gslug)
	if err != nil {
		log.Printf("ERR LP %s: Request failed (%+v)", gslug, err)
		return
	}
	page, err := goquery.NewDocumentFromResponse(resp)
	if err != nil {
		log.Printf("ERR LP %s: Document failure (%+v)", gslug, err)
		return
	}
	posterURL, _ := page.Find(".lightbox").Attr("href")
	LP.PosterS.String, LP.PosterB.String, err = image.ResizeCover(posterURL)
	if err != nil {
		log.Printf("WAR LP %s: Error resizing poster: %+v", gslug, err)
	}
	authorHref, _ := page.Find(".author > a.avatar").Attr("href")
	authorHref = strings.TrimSuffix(authorHref, "/")
	authorURL, _ := url.Parse(authorHref)
	authorSlug := path.Base(authorURL.Path)
	if authorSlug == "" || authorSlug == "." {
		log.Printf("ERR LP %s: No author found", gslug)
		return
	}
	var author database.Author
	database.Db.Where("slug = ?", authorSlug).First(&author)
	LP.AuthorID = author.ID
	if err := database.Db.Create(&LP).Error; err != nil {
		log.Printf("ERR LP %s: Could not be added to database (%+v)", gslug, err)
		return
	}
	log.Printf("SUC LP %s: Added to database", gslug)
}
// Update Lets Plays
//
// UpdateLetsPlays refreshes every stored Let's Play concurrently, one
// goroutine per row, and waits for all of them.
func UpdateLetsPlays() {
	var letsPlays []database.LetsPlay
	var total int
	database.Db.Find(&letsPlays).Count(&total)
	progress := pb.StartNew(total)
	for _, lp := range letsPlays {
		wg.Add(1)
		go UpdateLP(lp, progress)
	}
	wg.Wait()
	progress.FinishPrint("Updated lets plays")
}
// UpdateLP re-scrapes one Let's Play's poster, but only when no big
// poster has been stored yet (PosterB invalid); otherwise it is a
// no-op.
func UpdateLP(LP database.LetsPlay, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	if LP.PosterB.Valid == false {
		res, err := GetHTTPResource(config.C.GronkhUrl + "/lets-play/" + LP.Slug.String)
		if err != nil {
			log.Printf("ERR LP %s: Request failed (%+v)", LP.Slug.String, err)
			return
		}
		doc, err := goquery.NewDocumentFromResponse(res)
		if err != nil {
			log.Printf("ERR LP %s: Document failure (%+v)", LP.Slug.String, err)
			return
		}
		pos, _ := doc.Find(".lightbox").Attr("href")
		LP.PosterS.String, LP.PosterB.String, err = image.ResizeCover(pos)
		if err != nil {
			log.Printf("WAR LP %s: Error resizing poster: %+v", LP.Slug.String, err)
		}
		if err := database.Db.Save(&LP).Error; err != nil {
			log.Printf("ERR LP %s: Could not be updated in database (%+v)", LP.Slug.String, err)
			return
		} else {
			log.Printf("SUC LP %s: Updated in database", LP.Slug.String)
		}
	}
}

197
gparser/ltparser.go Normal file
View file

@ -0,0 +1,197 @@
package main
import (
"log"
"net/url"
"strconv"
"strings"
"time"
"path"
"github.com/ChannelMeter/iso8601duration"
"github.com/PuerkitoBio/goquery"
"github.com/cheggaaa/pb"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
"git.1750studios.com/gronkhDE/gogronkh/image"
"git.1750studios.com/gronkhDE/gogronkh/youtube"
)
// Parse new Lets Tests
//
// ParseLetsTests kicks off the paginated Let's-Test scrape starting at
// page 1 and waits for all spawned goroutines.
func ParseLetsTests() {
	progress := pb.StartNew(0)
	ParseLTPage(1, progress)
	wg.Wait()
	progress.FinishPrint("Parsed lets tests")
}
// ParseLT scrapes one Let's Test from its listing entry: cover image,
// author, description and YouTube metadata, then inserts the row.
// Tests whose slug is already stored are skipped early.
func ParseLT(i int, s *goquery.Selection, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	u, _ := s.Find("h1 > a").Attr("href")
	ur, _ := url.Parse(u)
	slug := path.Base(ur.Path)
	var count int
	// Skip tests that are already in the database.
	if database.Db.Model(database.LetsTest{}).Where("slug = ?", slug).Count(&count); count > 0 {
		return
	}
	var LT database.LetsTest
	LT.Slug.String = slug
	LT.Name.String = s.Find("h1 > a").First().Text()
	res, err := GetHTTPResource(u)
	if err != nil {
		log.Printf("ERR LT %s: Request failed (%+v)", slug, err)
		return
	}
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		log.Printf("ERR LT %s: Document failure (%+v)", slug, err)
		return
	}
	pos, _ := doc.Find("div#game-cover > a.lightbox").Attr("href")
	LT.PosterS.String, LT.PosterB.String, err = image.ResizeCover(pos)
	if err != nil {
		log.Printf("WAR LT %s: Error resizing cover: %+v", slug, err)
	}
	au, _ := doc.Find(".author > a.avatar").Attr("href")
	aur, _ := url.Parse(au)
	if path.Base(aur.Path) == "" || path.Base(aur.Path) == "." {
		log.Printf("ERR LT %s: No author found", slug)
		return
	}
	var AT database.Author
	database.Db.Where("slug = ?", path.Base(aur.Path)).First(&AT)
	LT.AuthorID = AT.ID
	// Collect the on-page description paragraphs.
	doc.Find(".article > p").Each(func(i int, s *goquery.Selection) {
		LT.Descr.String += s.Text() + "\n"
	})
	LT.Descr.String = strings.Trim(LT.Descr.String, "\n ")
	// Video id from the embedded player URL; handles both /embed/<id>
	// and watch?v=<id> forms.
	yt, _ := doc.Find(".youtube > iframe").Attr("src")
	ytpath, _ := url.Parse(yt)
	LT.Youtube.String = path.Base(ytpath.Path)
	if LT.Youtube.String == "watch" {
		LT.Youtube.String = ytpath.Query().Get("v")
		if idx := strings.LastIndex(LT.Youtube.String, "?"); idx != -1 {
			LT.Youtube.String = LT.Youtube.String[:idx]
		}
	}
	ytres, err := youtube.GetVideos([]string {"snippet", "statistics", "status", "contentDetails"}, []string {LT.Youtube.String}, config.C.YoutubeKey)
	if err != nil || len(ytres.Items) == 0 || ytres.Items[0].Status.UploadStatus != "processed" {
		log.Printf("WAR LT %s: Video %s is private (%+v)", slug, LT.Youtube.String, err)
		return
	}
	// Fall back to the YouTube description when the page had none.
	if LT.Descr.String == "" {
		LT.Descr.String = ytres.Items[0].Snippet.Description
	}
	// Prefer the maxres thumbnail, fall back to "high".
	if thumb, ok := ytres.Items[0].Snippet.Thumbnails["maxres"]; ok {
		LT.ThumbS.String, LT.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	} else if thumb, ok := ytres.Items[0].Snippet.Thumbnails["high"]; ok {
		LT.ThumbS.String, LT.ThumbB.String, err = image.ResizeThumb(thumb.Url)
	}
	if err != nil {
		log.Printf("WAR LT %s: Error resizing thumbnail: %+v", slug, err)
	}
	LT.Aired, err = time.Parse(time.RFC3339Nano, ytres.Items[0].Snippet.PublishedAt)
	if err != nil {
		log.Printf("ERR LT %s: Failed to parse aired", slug)
		return
	}
	dur, err := duration.FromString(ytres.Items[0].ContentDetails.Duration)
	if err == nil {
		LT.Duration.Int64 = int64(dur.ToDuration().Seconds())
	} else {
		log.Printf("ERR LT %s: Failed to parse duration", slug)
		return
	}
	LT.Rating.Float64, LT.Votes.Int64 = youtube.GetRatingAndVotesWithRes(ytres)
	if err := database.Db.Create(&LT).Error; err != nil {
		log.Printf("ERR LT %s: Could not be added to databse (%+v)", slug, err)
	} else {
		log.Printf("SUC LT %s: Added to database", slug)
	}
}
// ParseLTPage scrapes one page of the paginated "/testet" listing,
// spawning a goroutine per entry, then recurses synchronously onto
// the next page while a ".next" link exists.
func ParseLTPage(page int, bar *pb.ProgressBar) {
	resp, err := GetHTTPResource(config.C.GronkhUrl + "/testet/page/" + strconv.Itoa(page))
	if err != nil {
		log.Printf("ERR LT Page: Request failed (%+v)", err)
		return
	}
	document, err := goquery.NewDocumentFromResponse(resp)
	if err != nil {
		log.Printf("ERR LT Page: Document failure (%+v)", err)
		return
	}
	// Parse the Episodes
	document.Find(".entry.entry-letsplay").Each(func(idx int, sel *goquery.Selection) {
		wg.Add(1)
		go ParseLT(idx, sel, bar)
	})
	if _, more := document.Find(".next").Attr("href"); more {
		ParseLTPage(page+1, bar)
	}
}
// Update Lets Tests
//
// UpdateLetsTests refreshes every stored Let's Test concurrently, one
// goroutine per row, and waits for all of them.
func UpdateLetsTests() {
	var letsTests []database.LetsTest
	var total int
	database.Db.Find(&letsTests).Count(&total)
	progress := pb.StartNew(total)
	for _, lt := range letsTests {
		wg.Add(1)
		go UpdateLT(lt, progress)
	}
	wg.Wait()
	progress.FinishPrint("Updated lets tests")
}
// UpdateLT refreshes one Let's Test's rating/votes and thumbnail from
// YouTube. Private videos are deleted; a missing poster is re-scraped
// from the test's page before saving.
func UpdateLT(LT database.LetsTest, bar *pb.ProgressBar) {
	defer wg.Done()
	if bar != nil {
		defer bar.Increment()
	}
	var status string
	var thumb youtube.Thumb
	var err error
	LT.Rating.Float64, LT.Votes.Int64, status, thumb = youtube.GetRatingAndVotesWithId(LT.Youtube.String, config.C.YoutubeKey)
	if status != "private" {
		LT.ThumbS.String, LT.ThumbB.String, err = image.ResizeThumb(thumb.Url)
		if err != nil {
			// BUGFIX: log tag said "WAR EP" (copy-paste from the episode
			// updater) although this is a Let's Test.
			log.Printf("WAR LT %s: Error resizing thumbnail: %+v", LT.Slug.String, err)
		}
		// Only re-scrape the poster when none is stored yet.
		if LT.PosterB.Valid == false {
			res, err := GetHTTPResource(config.C.GronkhUrl + "/testet/" + LT.Slug.String)
			if err != nil {
				log.Printf("ERR LT %s: Request failed (%+v)", LT.Slug.String, err)
				return
			}
			doc, err := goquery.NewDocumentFromResponse(res)
			if err != nil {
				log.Printf("ERR LT %s: Document failure (%+v)", LT.Slug.String, err)
				return
			}
			pos, _ := doc.Find("div#game-cover > a.lightbox").Attr("href")
			LT.PosterS.String, LT.PosterB.String, err = image.ResizeCover(pos)
			if err != nil {
				log.Printf("WAR LT %s: Error resizing cover: %+v", LT.Slug.String, err)
			}
		}
		if err := database.Db.Save(&LT).Error; err != nil {
			// BUGFIX: "databse" typo in the log message.
			log.Printf("ERR LT %s: Could not be updated in database (%+v)", LT.Slug.String, err)
		} else {
			log.Printf("SUC LT %s: Updated in database", LT.Slug.String)
		}
	} else {
		database.Db.Delete(&LT)
		log.Printf("SUC LT %s: Removed from database", LT.Slug.String)
	}
}

37
gparser/main.go Normal file
View file

@ -0,0 +1,37 @@
package main
import (
"os"
"log"
"regexp"
"sync"
"git.1750studios.com/gronkhDE/gogronkh/config"
"git.1750studios.com/gronkhDE/gogronkh/database"
)
// conf is never referenced here; configuration is accessed through the
// package-global config.C — NOTE(review): candidate for removal.
var conf config.Config
// episodeRegex extracts episode numbers from titles; compiled in main
// from config.C.EpisodeRegex.
var episodeRegex *regexp.Regexp
// wg tracks every parser goroutine so shutdown can wait for them.
var wg sync.WaitGroup
// main loads the configuration, prepares the image directory and the
// database, runs an initial full parse, then schedules periodic
// updates and blocks until a termination signal.
func main() {
	log.SetFlags(log.Lshortfile)
	log.SetOutput(os.Stderr)
	// Generalized: the config path may be passed as the first argument;
	// the previous hard-coded location remains the default.
	configPath := "/home/andreas/.gronkh.json"
	if len(os.Args) > 1 {
		configPath = os.Args[1]
	}
	err := config.LoadConfig(configPath)
	if err != nil {
		log.Fatalf("FAT Config error: %+v", err)
		return
	}
	episodeRegex = regexp.MustCompile(config.C.EpisodeRegex)
	if err = os.MkdirAll(config.C.ImageDirectory, 0775); err != nil {
		log.Fatalf("FAT Could not create ImageDirectory, error: %+v", err)
	}
	// BUGFIX: the InitDb error was silently discarded. InitDb currently
	// Fatalfs on its own, but checking here keeps main correct if that
	// ever changes.
	if err = database.InitDb(config.C.DatabaseConnection); err != nil {
		log.Fatalf("FAT Database error: %+v", err)
	}
	ParseAll()
	StartCron()
	MainLoop()
}

53
gparser/utils.go Normal file
View file

@ -0,0 +1,53 @@
package main
import (
"log"
"os"
"os/signal"
"github.com/robfig/cron"
)
// UpdateAll refreshes every entity type already in the database, in
// order, logging a marker after each stage completes.
func UpdateAll() {
	stages := []struct {
		run    func()
		banner string
	}{
		{UpdateAuthors, "*** FINISHED UPDATING AUTHORS ***"},
		{UpdateLetsTests, "*** FINISHED UPDATING LETSTESTS ***"},
		{UpdateLetsPlays, "*** FINISHED UPDATING LETSPLAYS ***"},
		{UpdateEpisodes, "*** FINISHED UPDATING EPISODES ***"},
	}
	for _, stage := range stages {
		stage.run()
		log.Printf(stage.banner)
	}
}
// ParseAll runs a full scrape pass over every entity type, in
// dependency order (authors before their series and episodes),
// logging a marker after each stage completes.
func ParseAll() {
	stages := []struct {
		run    func()
		banner string
	}{
		{ParseAuthors, "*** FINISHED PARSING AUTHORS ***"},
		{ParseLetsTests, "*** FINISHED PARSING LETSTESTS ***"},
		{ParseLetsPlays, "*** FINISHED PARSING LETSPLAYS ***"},
		{ParseEpisodes, "*** FINISHED PARSING EPISODES ***"},
	}
	for _, stage := range stages {
		stage.run()
		log.Printf(stage.banner)
	}
}
// StartCron schedules the periodic maintenance jobs: a full update
// every hour and an RSS sweep every ten minutes.
func StartCron() {
	c := cron.New()
	// BUGFIX: AddFunc returns an error (invalid spec) that was ignored;
	// a silently unscheduled job would never run.
	if err := c.AddFunc("@every 1h", UpdateAll); err != nil {
		log.Printf("ERR Cron: could not schedule UpdateAll (%+v)", err)
	}
	if err := c.AddFunc("@every 10m", ParseRSSFeeds); err != nil {
		log.Printf("ERR Cron: could not schedule ParseRSSFeeds (%+v)", err)
	}
	c.Start()
}
// MainLoop blocks until an interrupt (or kill) signal arrives, then
// waits for all outstanding parser goroutines before returning.
func MainLoop() {
	sig := make(chan os.Signal, 1)
	signal.Notify(sig) // no signal list: subscribes to all signals
	// Idiomatic endless receive (was `for true`); the channel is never
	// closed, so the range only ends via break.
	for s := range sig {
		// NOTE(review): os.Kill (SIGKILL) cannot normally be delivered
		// to a handler on POSIX systems; os.Interrupt does the work.
		if s == os.Kill || s == os.Interrupt {
			log.Printf("Quitting…")
			break
		}
	}
	wg.Wait()
}

229
gserver/apiv3/apiv3.go Normal file
View file

@ -0,0 +1,229 @@
package apiv3
import (
"fmt"
"net/http"
"strconv"
"strings"
. "git.1750studios.com/gronkhDE/gogronkh/gserver/utlis"
"git.1750studios.com/gronkhDE/gogronkh/database"
"github.com/jinzhu/copier"
"github.com/gin-gonic/gin"
)
// GetAuthors responds with a single author (when the :aid route
// parameter is present and passes CleanParam) or the full author list
// otherwise. Responses are ETag-aware: HandleEtag short-circuits the
// handler (304) when it returns true. An unknown author id yields an
// empty JSON object with status 200.
func GetAuthors(c *gin.Context) {
	var ATs []database.Author
	var AATs []Author
	if author, ok := CleanParam(c.Param("aid")); ok {
		database.Db.Where(author).Find(&ATs)
		if len(ATs) > 0 {
			// Copy database rows into the API view type.
			copier.Copy(&AATs, &ATs)
			if HandleEtag(c, fmt.Sprintf("%#v", AATs[0])) {
				return
			}
			c.JSON(http.StatusOK, AATs[0])
		} else {
			if HandleEtag(c, "{}") {
				return
			}
			c.JSON(http.StatusOK, gin.H{})
		}
	} else {
		database.Db.Find(&ATs)
		copier.Copy(&AATs, &ATs)
		if HandleEtag(c, fmt.Sprintf("%#v", AATs)) {
			return
		}
		c.JSON(http.StatusOK, AATs)
	}
}
// GetAuthorTests responds with all Let's Tests belonging to the author
// selected by the :aid route parameter; 404 when the parameter is
// missing or unclean.
func GetAuthorTests(c *gin.Context) {
	var AT database.Author
	var LTs []database.LetsTest
	var ALTs []LetsTest
	author, ok := CleanParam(c.Param("aid"))
	if !ok {
		// BUGFIX: without this return the handler kept running after
		// aborting and attempted to write a second (empty) response.
		c.AbortWithStatus(404)
		return
	}
	database.Db.Where(author).Find(&AT)
	database.Db.Model(&AT).Related(&LTs)
	copier.Copy(&ALTs, &LTs)
	if HandleEtag(c, fmt.Sprintf("%#v", ALTs)) {
		return
	}
	c.JSON(http.StatusOK, ALTs)
}
// GetAuthorLps responds with all Let's Plays belonging to the author
// selected by the :aid route parameter; 404 when the parameter is
// missing or unclean.
func GetAuthorLps(c *gin.Context) {
	var AT database.Author
	var LPs []database.LetsPlay
	var ALPs []LetsPlay
	author, ok := CleanParam(c.Param("aid"))
	if !ok {
		// BUGFIX: without this return the handler kept running after
		// aborting and attempted to write a second (empty) response.
		c.AbortWithStatus(404)
		return
	}
	database.Db.Where(author).Find(&AT)
	database.Db.Model(&AT).Related(&LPs)
	copier.Copy(&ALPs, &LPs)
	if HandleEtag(c, fmt.Sprintf("%#v", ALPs)) {
		return
	}
	c.JSON(http.StatusOK, ALPs)
}
func GetTests(c *gin.Context) {
var LTs []database.LetsTest
var ALTs []LetsTest
if lt, ok := CleanParam(c.Param("tid")); ok {
database.Db.Where(lt).Find(&LTs)
if len(LTs) > 0 {
copier.Copy(&ALTs, &LTs)
if HandleEtag(c, fmt.Sprintf("%#v", ALTs[0])) {
return
}
c.JSON(http.StatusOK, ALTs[0])
} else {
if HandleEtag(c, "{}") {
return
}