Skip to content

Commit 1fd7088

Browse files
committed
Better error handling
- Removed all panic code. It should now continue downloading in most cases where it previously would panic mid-download. Closes #3
1 parent 5974b99 commit 1fd7088

File tree

8 files changed

+178
-139
lines changed

8 files changed

+178
-139
lines changed

cmd/root.go

Lines changed: 48 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,13 @@ Would download chapters 10 to 20 of Black Clover from
4848
mangadex.org in Spanish`, "manga-downloader", color.YellowString("manga-downloader")),
4949
Args: cobra.ExactArgs(2),
5050
Run: func(cmd *cobra.Command, args []string) {
51-
s := grabber.NewSite(args[0])
51+
s, errs := grabber.NewSite(args[0])
52+
if len(errs) > 0 {
53+
color.Red("Errors testing site (a site may be down):")
54+
for _, err := range errs {
55+
color.Red(err.Error())
56+
}
57+
}
5258
if s == nil {
5359
color.Yellow("Site not recognised")
5460
os.Exit(1)
@@ -57,55 +63,70 @@ mangadex.org in Spanish`, "manga-downloader", color.YellowString("manga-download
5763

5864
// ranges parsing
5965
rngs, err := ranges.Parse(args[1])
60-
if err != nil {
61-
panic(err)
62-
}
66+
cerr(err, "Error parsing ranges: %s")
6367

6468
// fetch series title
65-
title := s.GetTitle()
69+
title, err := s.FetchTitle()
70+
cerr(err, "Error fetching title: %s")
6671

6772
// fetch all chapters
68-
chapters := s.FetchChapters()
73+
chapters, errs := s.FetchChapters()
74+
if len(errs) > 0 {
75+
color.Red("Errors fetching chapters:")
76+
for _, err := range errs {
77+
color.Red(err.Error())
78+
}
79+
os.Exit(1)
80+
}
6981

7082
// sort and filter specified ranges
7183
chapters = chapters.FilterRanges(rngs)
7284

7385
if len(chapters) == 0 {
74-
color.Yellow("No chapters found for the specified ranges")
75-
os.Exit(0)
86+
warn("No chapters found for the specified ranges")
7687
}
7788

7889
wg := sync.WaitGroup{}
79-
guard := make(chan struct{}, s.GetMaxConcurrency().Chapters)
90+
g := make(chan struct{}, s.GetMaxConcurrency().Chapters)
8091

8192
// loop chapters to retrieve pages
8293
for _, chap := range chapters {
83-
guard <- struct{}{}
94+
g <- struct{}{}
8495
wg.Add(1)
8596
go func(chap grabber.Filterable) {
8697
defer wg.Done()
87-
chapter := s.FetchChapter(chap)
98+
chapter, err := s.FetchChapter(chap)
99+
if err != nil {
100+
color.Red("- error fetching chapter %s: %s", chap.GetTitle(), err.Error())
101+
<-g
102+
return
103+
}
88104
fmt.Printf("fetched %s %s\n", color.CyanString(title), color.HiBlackString(chapter.GetTitle()))
89105

90106
files, err := downloader.FetchChapter(s, chapter)
91107
if err != nil {
92-
panic(err)
108+
color.Red("- error downloading chapter %s: %s", chapter.GetTitle(), err.Error())
109+
<-g
110+
return
93111
}
94112

95113
filename, err := packer.NewFilenameFromTemplate(title, chapter, s.GetFilenameTemplate())
96114
if err != nil {
97-
panic(err)
115+
color.Red("- error creating filename for chapter %s: %s", chapter.GetTitle(), err.Error())
116+
<-g
117+
return
98118
}
99119

100120
filename += ".cbz"
101121

102-
fmt.Printf("- %s %s\n", color.GreenString("saving file"), color.HiBlackString(filename))
103122
if err = packer.ArchiveCBZ(filename, files); err != nil {
104123
color.Red("- error saving file %s: %s", filename, err.Error())
124+
} else {
125+
fmt.Printf("- %s %s\n", color.GreenString("saved file"), color.HiBlackString(filename))
105126
}
106127

107128
// release guard
108-
<-guard
129+
<-g
109130
}(chap)
110131
}
111132
wg.Wait()
@@ -140,3 +161,15 @@ func init() {
140161
rootCmd.Flags().StringP("language", "l", "", "only download the specified language")
141162
rootCmd.Flags().StringP("filename-template", "t", packer.FilenameTemplateDefault, "template for the resulting filename")
142163
}
164+
165+
func cerr(err error, prefix string) {
166+
if err != nil {
167+
fmt.Println(color.RedString(prefix + err.Error()))
168+
os.Exit(1)
169+
}
170+
}
171+
172+
func warn(err string) {
173+
fmt.Println(color.YellowString(err))
174+
os.Exit(1)
175+
}

downloader/fetch.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ type File struct {
1717
}
1818

1919
// FetchChapter downloads all the pages of a chapter
20-
func FetchChapter(site grabber.Site, chapter grabber.Chapter) (files []*File, err error) {
20+
func FetchChapter(site grabber.Site, chapter *grabber.Chapter) (files []*File, err error) {
2121
wg := sync.WaitGroup{}
2222

2323
color.Blue("- downloading %s pages...", color.HiBlackString(chapter.GetTitle()))

grabber/inmanga.go

Lines changed: 40 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -23,79 +23,84 @@ type InmangaChapter struct {
2323
}
2424

2525
// Test checks if the site is InManga
26-
func (i *Inmanga) Test() bool {
26+
func (i *Inmanga) Test() (bool, error) {
2727
re := regexp.MustCompile(`inmanga\.com`)
28-
return re.MatchString(i.URL)
28+
return re.MatchString(i.URL), nil
29+
}
30+
31+
// FetchTitle fetches the manga title
32+
func (i *Inmanga) FetchTitle() (string, error) {
33+
if i.title != "" {
34+
return i.title, nil
35+
}
36+
37+
body, err := http.Get(http.RequestParams{
38+
URL: i.URL,
39+
})
40+
if err != nil {
41+
return "", err
42+
}
43+
defer body.Close()
44+
45+
doc, err := goquery.NewDocumentFromReader(body)
46+
if err != nil {
47+
return "", err
48+
}
49+
50+
i.title = doc.Find("h1").Text()
51+
52+
return i.title, nil
2953
}
3054

3155
// FetchChapters returns the chapters of the manga
32-
func (i Inmanga) FetchChapters() Filterables {
56+
func (i Inmanga) FetchChapters() (Filterables, []error) {
3357
id := GetUUID(i.URL)
3458

3559
// retrieve chapters json list
3660
body, err := http.GetText(http.RequestParams{
3761
URL: "https://inmanga.com/chapter/getall?mangaIdentification=" + id,
3862
})
3963
if err != nil {
40-
panic(err)
64+
return nil, []error{err}
4165
}
4266

4367
raw := struct {
4468
Data string
4569
}{}
4670

4771
if err = json.Unmarshal([]byte(body), &raw); err != nil {
48-
panic(err)
72+
return nil, []error{err}
4973
}
5074

5175
feed := inmangaChapterFeed{}
52-
err = json.Unmarshal([]byte(raw.Data), &feed)
53-
if err != nil {
54-
panic(err)
55-
}
56-
57-
return newInmangaChaptersSlice(feed.Result)
58-
}
59-
60-
// GetTitle fetches the manga title
61-
func (i *Inmanga) GetTitle() string {
62-
if i.title != "" {
63-
return i.title
76+
if err = json.Unmarshal([]byte(raw.Data), &feed); err != nil {
77+
return nil, []error{err}
6478
}
6579

66-
body, err := http.Get(http.RequestParams{
67-
URL: i.URL,
68-
})
69-
if err != nil {
70-
panic(err)
71-
}
72-
defer body.Close()
73-
74-
doc, err := goquery.NewDocumentFromReader(body)
75-
if err != nil {
76-
panic(err)
80+
chapters := make(Filterables, 0, len(feed.Result))
81+
for _, c := range feed.Result {
82+
chapters = append(chapters, newInmangaChapter(c))
7783
}
7884

79-
i.title = doc.Find("h1").Text()
80-
return i.title
85+
return chapters, nil
8186
}
8287

8388
// FetchChapter fetches the chapter with its pages
84-
func (i Inmanga) FetchChapter(chap Filterable) Chapter {
89+
func (i Inmanga) FetchChapter(chap Filterable) (*Chapter, error) {
8590
ichap := chap.(*InmangaChapter)
8691
body, err := http.Get(http.RequestParams{
8792
URL: "https://inmanga.com/chapter/chapterIndexControls?identification=" + ichap.Id,
8893
})
8994
if err != nil {
90-
panic(err)
95+
return nil, err
9196
}
9297
defer body.Close()
9398
doc, err := goquery.NewDocumentFromReader(body)
9499
if err != nil {
95-
panic(err)
100+
return nil, err
96101
}
97102

98-
chapter := Chapter{
103+
chapter := &Chapter{
99104
Title: chap.GetTitle(),
100105
Number: chap.GetNumber(),
101106
PagesCount: int64(ichap.PagesCount),
@@ -112,7 +117,7 @@ func (i Inmanga) FetchChapter(chap Filterable) Chapter {
112117
})
113118
})
114119

115-
return chapter
120+
return chapter, nil
116121
}
117122

118123
// newInmangaChapter creates an InMangaChapter from an InMangaChapterFeedResult
@@ -127,16 +132,6 @@ func newInmangaChapter(c inmangaChapterFeedResult) *InmangaChapter {
127132
}
128133
}
129134

130-
// newInmangaChaptersSlice creates a slice of Filterables from a slice of InMangaChapterFeedResult
131-
func newInmangaChaptersSlice(s []inmangaChapterFeedResult) Filterables {
132-
chapters := make(Filterables, 0, len(s))
133-
for _, c := range s {
134-
chapters = append(chapters, newInmangaChapter(c))
135-
}
136-
137-
return chapters
138-
}
139-
140135
// inmangaChapterFeed is the JSON feed for the chapters list
141136
type inmangaChapterFeed struct {
142137
Result []inmangaChapterFeedResult

0 commit comments

Comments (0)