webtoon-dl-gif-support/main.go

198 lines
4.8 KiB
Go
Raw Normal View History

2023-12-20 10:33:51 -08:00
package main
import (
	"bytes"
	"fmt"
	"image"
	// Blank imports register the decoders image.DecodeConfig relies on;
	// without them every image fails with "image: unknown format".
	_ "image/gif"
	_ "image/jpeg"
	_ "image/png"
	"io"
	"net/http"
	"os"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/anaskhan96/soup"
	"github.com/signintech/gopdf"
)
2023-12-23 16:42:43 -08:00
func getImgLinksForEpisode(url string) []string {
2023-12-21 09:20:50 -08:00
resp, err := soup.Get(url)
2023-12-23 16:42:43 -08:00
time.Sleep(500 * time.Millisecond)
2023-12-21 09:20:50 -08:00
if err != nil {
fmt.Println(fmt.Sprintf("Error fetching page: %v", err))
os.Exit(1)
}
doc := soup.HTMLParse(resp)
imgs := doc.Find("div", "class", "viewer_lst").FindAll("img")
var imgLinks []string
for _, img := range imgs {
2023-12-23 16:42:43 -08:00
if dataURL, ok := img.Attrs()["data-url"]; ok {
imgLinks = append(imgLinks, dataURL)
}
2023-12-21 09:20:50 -08:00
}
return imgLinks
}
2023-12-23 16:42:43 -08:00
// getEpisodeLinksForPage fetches one page of an episode list and returns the
// hrefs of every episode viewer link found inside div.detail_lst.
//
// Returns an empty slice and a non-nil error when the page cannot be fetched
// or the episode-list container is missing from the parsed document.
func getEpisodeLinksForPage(url string) ([]string, error) {
	resp, err := soup.Get(url)
	// Throttle consecutive requests to be polite to the server.
	time.Sleep(500 * time.Millisecond)
	if err != nil {
		return []string{}, fmt.Errorf("error fetching page: %v", err)
	}
	doc := soup.HTMLParse(resp)
	list := doc.Find("div", "class", "detail_lst")
	if list.Error != nil {
		// A missing container would make FindAll panic; surface an error
		// instead so the pagination loop in getImgLinks can stop cleanly.
		return []string{}, fmt.Errorf("error locating episode list: %v", list.Error)
	}
	var links []string
	for _, anchor := range list.FindAll("a") {
		if href := anchor.Attrs()["href"]; strings.Contains(href, "/viewer") {
			links = append(links, href)
		}
	}
	return links, nil
}
// getImgLinks resolves a webtoons URL to the full list of panel image URLs.
//
// A URL containing "/viewer" is treated as a single episode. Anything else
// is treated as an episode-list page: every numbered list page is walked,
// the collected episodes are ordered by their episode_no query parameter,
// and the panel links of each episode are concatenated in that order.
func getImgLinks(url string) []string {
	if strings.Contains(url, "/viewer") {
		// Single-episode viewer page.
		return getImgLinksForEpisode(url)
	}

	// Episode-list mode: walk &page=1, &page=2, ... until a repeat appears.
	pageParam := regexp.MustCompile("&page=[0-9]+")
	seen := make(map[string]struct{})
	done := false
	for page := 1; !done; page++ {
		url = pageParam.ReplaceAllString(url, "") + fmt.Sprintf("&page=%d", page)
		episodeLinks, err := getEpisodeLinksForPage(url)
		if err != nil {
			break
		}
		for _, link := range episodeLinks {
			// Past the last page the site just re-serves the final page,
			// so the first duplicate link means we are finished.
			if _, dup := seen[link]; dup {
				done = true
				break
			}
			seen[link] = struct{}{}
		}
		if !done {
			println(url)
		}
	}

	episodes := make([]string, 0, len(seen))
	for link := range seen {
		episodes = append(episodes, link)
	}

	// Sort episodes by the numeric episode_no extracted from each URL;
	// unparseable links sort as 0.
	epNoRe := regexp.MustCompile("episode_no=([0-9]+)")
	numberOf := func(link string) int {
		m := epNoRe.FindStringSubmatch(link)
		if len(m) != 2 {
			return 0
		}
		n, err := strconv.Atoi(m[1])
		if err != nil {
			return 0
		}
		return n
	}
	sort.Slice(episodes, func(i, j int) bool {
		return numberOf(episodes[i]) < numberOf(episodes[j])
	})

	var allImgLinks []string
	for _, link := range episodes {
		println(link)
		allImgLinks = append(allImgLinks, getImgLinksForEpisode(link)...)
	}
	return allImgLinks
}
2023-12-21 08:38:48 -08:00
// fetchImage downloads one panel image and returns its raw bytes.
//
// The Referer header is required: webtoons.com's image CDN rejects requests
// that do not appear to come from the site itself. Any failure — request
// construction, transport error, non-200 status, or body read — exits the
// process with status 1, matching the CLI's fail-fast style.
func fetchImage(imgLink string) []byte {
	req, err := http.NewRequest("GET", imgLink, nil)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(1)
	}
	req.Header.Set("Referer", "http://www.webtoons.com")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(1)
	}
	defer func() {
		if cerr := resp.Body.Close(); cerr != nil {
			fmt.Println(cerr.Error())
			os.Exit(1)
		}
	}()
	// Without this check an HTML error page (403/404) would be returned as
	// "image" bytes and embedded into the PDF as a corrupt page.
	if resp.StatusCode != http.StatusOK {
		fmt.Println(fmt.Sprintf("unexpected status %d fetching %s", resp.StatusCode, imgLink))
		os.Exit(1)
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(1)
	}
	return body
}
2023-12-20 10:33:51 -08:00
func main() {
2023-12-21 09:20:50 -08:00
if len(os.Args) < 2 {
fmt.Println("Usage: webtoon-dl <url>")
2023-12-20 10:33:51 -08:00
os.Exit(1)
}
2023-12-21 09:20:50 -08:00
url := os.Args[1]
imgLinks := getImgLinks(url)
fmt.Println(fmt.Sprintf("found %d pages", len(imgLinks)))
2023-12-21 08:38:48 -08:00
pdf := gopdf.GoPdf{}
pdf.Start(gopdf.Config{Unit: gopdf.UnitPT, PageSize: *gopdf.PageSizeA4})
for _, imgLink := range imgLinks {
fmt.Println(imgLink)
img := fetchImage(imgLink)
holder, err := gopdf.ImageHolderByBytes(img)
if err != nil {
2023-12-21 09:20:50 -08:00
fmt.Println(err.Error())
os.Exit(1)
2023-12-21 08:38:48 -08:00
}
d, _, err := image.DecodeConfig(bytes.NewReader(img))
if err != nil {
2023-12-21 09:20:50 -08:00
fmt.Println(err.Error())
os.Exit(1)
2023-12-21 08:38:48 -08:00
}
// gopdf assumes dpi 128 https://github.com/signintech/gopdf/issues/168
// W and H are in points, 1 point = 1/72 inch
2023-12-21 09:20:50 -08:00
// convert pixels (Width and Height) to points
// subtract 1 point to account for margins
2023-12-21 08:38:48 -08:00
pdf.AddPageWithOption(gopdf.PageOption{PageSize: &gopdf.Rect{
W: float64(d.Width)*72/128 - 1,
H: float64(d.Height)*72/128 - 1,
}})
err = pdf.ImageByHolder(holder, 0, 0, nil)
if err != nil {
2023-12-21 09:20:50 -08:00
fmt.Println(err.Error())
os.Exit(1)
2023-12-21 08:38:48 -08:00
}
}
2023-12-21 09:20:50 -08:00
outURL := strings.ReplaceAll(url, "http://", "")
outURL = strings.ReplaceAll(outURL, "https://", "")
outURL = strings.ReplaceAll(outURL, "www.", "")
outURL = strings.ReplaceAll(outURL, "webtoons.com/", "")
outURL = strings.Split(outURL, "?")[0]
outURL = strings.ReplaceAll(outURL, "/viewer", "")
2023-12-23 09:10:04 -08:00
outURL = strings.ReplaceAll(outURL, "/", "-")
outPath := outURL + ".pdf"
2023-12-21 09:20:50 -08:00
err := pdf.WritePdf(outPath)
2023-12-21 08:38:48 -08:00
if err != nil {
2023-12-21 09:20:50 -08:00
fmt.Println(err.Error())
os.Exit(1)
2023-12-21 08:38:48 -08:00
}
2023-12-21 09:20:50 -08:00
fmt.Println(fmt.Sprintf("saved to %s", outPath))
2023-12-20 10:33:51 -08:00
}