Compare commits

..

4 Commits

Author SHA1 Message Date
66d6c22e55
improve cover title 2023-04-27 15:58:40 +02:00
6295fefa02
move image transformation to processing 2023-04-27 15:04:52 +02:00
02f86eb55e
move load to options as a helper 2023-04-27 13:46:00 +02:00
4553e1e673
decode image into loader 2023-04-27 12:02:47 +02:00
7 changed files with 336 additions and 276 deletions

2
go.mod
View File

@ -7,7 +7,7 @@ require (
github.com/disintegration/gift v1.2.1 github.com/disintegration/gift v1.2.1
github.com/gofrs/uuid v4.4.0+incompatible github.com/gofrs/uuid v4.4.0+incompatible
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0
github.com/nwaples/rardecode v1.1.3 github.com/nwaples/rardecode/v2 v2.0.0-beta.2
github.com/raff/pdfreader v0.0.0-20220308062436-033e8ac577f0 github.com/raff/pdfreader v0.0.0-20220308062436-033e8ac577f0
github.com/schollz/progressbar/v3 v3.13.1 github.com/schollz/progressbar/v3 v3.13.1
github.com/tcnksm/go-latest v0.0.0-20170313132115-e3007ae9052e github.com/tcnksm/go-latest v0.0.0-20170313132115-e3007ae9052e

4
go.sum
View File

@ -23,8 +23,8 @@ github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWV
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= github.com/nwaples/rardecode/v2 v2.0.0-beta.2 h1:e3mzJFJs4k83GXBEiTaQ5HgSc/kOK8q0rDaRO0MPaOk=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nwaples/rardecode/v2 v2.0.0-beta.2/go.mod h1:yntwv/HfMc/Hbvtq9I19D1n58te3h6KsqCf3GxyfBGY=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/raff/pdfreader v0.0.0-20220308062436-033e8ac577f0 h1:fuFvfwIc+cpySYurvDNTs5LIHXP9Cj3reVRplj9Whv4= github.com/raff/pdfreader v0.0.0-20220308062436-033e8ac577f0 h1:fuFvfwIc+cpySYurvDNTs5LIHXP9Cj3reVRplj9Whv4=

View File

@ -296,7 +296,7 @@ func (e *ePub) Write() error {
return err return err
} }
} }
if err := wz.WriteImage(epubimageprocessing.LoadCoverTitleData(part.Cover, title, e.Image.Quality)); err != nil { if err := wz.WriteImage(epubimageprocessing.CoverTitleData(part.Cover.Raw, title, e.Image.Quality)); err != nil {
return err return err
} }

View File

@ -1,70 +0,0 @@
package epubimagefilters
import (
"image"
epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
"github.com/disintegration/gift"
)
// create filter to apply to the source
func NewGift(img image.Image, options *epubimage.Options) *gift.GIFT {
g := gift.New()
g.SetParallelization(false)
if options.Crop {
g.Add(AutoCrop(
img,
options.CropRatioLeft,
options.CropRatioUp,
options.CropRatioRight,
options.CropRatioBottom,
))
}
if options.AutoRotate && img.Bounds().Dx() > img.Bounds().Dy() {
g.Add(gift.Rotate90())
}
if options.Contrast != 0 {
g.Add(gift.Contrast(float32(options.Contrast)))
}
if options.Brightness != 0 {
g.Add(gift.Brightness(float32(options.Brightness)))
}
g.Add(
Resize(options.ViewWidth, options.ViewHeight, gift.LanczosResampling),
Pixel(),
)
return g
}
// create filters to cut image into 2 equal pieces
func NewGiftSplitDoublePage(options *epubimage.Options) []*gift.GIFT {
gifts := make([]*gift.GIFT, 2)
gifts[0] = gift.New(
CropSplitDoublePage(options.Manga),
)
gifts[1] = gift.New(
CropSplitDoublePage(!options.Manga),
)
for _, g := range gifts {
g.SetParallelization(false)
if options.Contrast != 0 {
g.Add(gift.Contrast(float32(options.Contrast)))
}
if options.Brightness != 0 {
g.Add(gift.Brightness(float32(options.Brightness)))
}
g.Add(
Resize(options.ViewWidth, options.ViewHeight, gift.LanczosResampling),
)
}
return gifts
}

View File

@ -57,16 +57,16 @@ func (p *coverTitle) Draw(dst draw.Image, src image.Image, options *gift.Options
dst, dst,
borderArea, borderArea,
image.Black, image.Black,
image.Point{}, borderArea.Min,
draw.Over, draw.Src,
) )
draw.Draw( draw.Draw(
dst, dst,
textArea, textArea,
image.White, image.White,
image.Point{}, textArea.Min,
draw.Over, draw.Src,
) )
// Draw text // Draw text

View File

@ -4,12 +4,7 @@ Extract and transform image into a compressed jpeg.
package epubimageprocessing package epubimageprocessing
import ( import (
"fmt"
"image" "image"
_ "image/jpeg"
_ "image/png"
"io"
"os"
"path/filepath" "path/filepath"
"strings" "strings"
"sync" "sync"
@ -19,16 +14,8 @@ import (
epubimagefilters "github.com/celogeek/go-comic-converter/v2/internal/epub/imagefilters" epubimagefilters "github.com/celogeek/go-comic-converter/v2/internal/epub/imagefilters"
epubprogress "github.com/celogeek/go-comic-converter/v2/internal/epub/progress" epubprogress "github.com/celogeek/go-comic-converter/v2/internal/epub/progress"
"github.com/disintegration/gift" "github.com/disintegration/gift"
_ "golang.org/x/image/webp"
) )
type tasks struct {
Id int
Reader io.Reader
Path string
Name string
}
// only accept jpg, png and webp as source file // only accept jpg, png and webp as source file
func isSupportedImage(path string) bool { func isSupportedImage(path string) bool {
switch strings.ToLower(filepath.Ext(path)) { switch strings.ToLower(filepath.Ext(path)) {
@ -44,31 +31,7 @@ func isSupportedImage(path string) bool {
func LoadImages(o *Options) ([]*epubimage.Image, error) { func LoadImages(o *Options) ([]*epubimage.Image, error) {
images := make([]*epubimage.Image, 0) images := make([]*epubimage.Image, 0)
fi, err := os.Stat(o.Input) imageCount, imageInput, err := o.Load()
if err != nil {
return nil, err
}
var (
imageCount int
imageInput chan *tasks
)
// get all images through a channel of bytes
if fi.IsDir() {
imageCount, imageInput, err = o.loadDir()
} else {
switch ext := strings.ToLower(filepath.Ext(o.Input)); ext {
case ".cbz", ".zip":
imageCount, imageInput, err = o.loadCbz()
case ".cbr", ".rar":
imageCount, imageInput, err = o.loadCbr()
case ".pdf":
imageCount, imageInput, err = o.loadPdf()
default:
err = fmt.Errorf("unknown file format (%s): support .cbz, .zip, .cbr, .rar, .pdf", ext)
}
}
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -98,66 +61,31 @@ func LoadImages(o *Options) ([]*epubimage.Image, error) {
}) })
wg := &sync.WaitGroup{} wg := &sync.WaitGroup{}
for i := 0; i < o.Workers; i++ { for i := 0; i < o.WorkersRatio(50); i++ {
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()
for img := range imageInput { for img := range imageInput {
// Decode image src := img.Image
src, _, err := image.Decode(img.Reader)
if err != nil {
bar.Clear()
fmt.Fprintf(os.Stderr, "error processing image %s%s: %s\n", img.Path, img.Name, err)
os.Exit(1)
}
g := epubimagefilters.NewGift(src, o.Image) for part, dst := range TransformImage(src, img.Id, o.Image) {
// Convert image var raw image.Image
dst := image.NewGray(g.Bounds(src.Bounds())) if img.Id == 0 && part == 0 {
g.Draw(dst, src) raw = dst
}
var raw image.Image imageOutput <- &epubimage.Image{
if img.Id == 0 { Id: img.Id,
raw = dst Part: part,
} Raw: raw,
Data: epubimagedata.New(img.Id, part, dst, o.Image.Quality),
imageOutput <- &epubimage.Image{ Width: dst.Bounds().Dx(),
Id: img.Id, Height: dst.Bounds().Dy(),
Part: 0, IsCover: img.Id == 0 && part == 0,
Raw: raw, DoublePage: part == 0 && src.Bounds().Dx() > src.Bounds().Dy(),
Data: epubimagedata.New(img.Id, 0, dst, o.Image.Quality), Path: img.Path,
Width: dst.Bounds().Dx(), Name: img.Name,
Height: dst.Bounds().Dy(),
IsCover: img.Id == 0,
DoublePage: src.Bounds().Dx() > src.Bounds().Dy(),
Path: img.Path,
Name: img.Name,
}
// Auto split double page
// Except for cover
// Only if the src image have width > height and is bigger than the view
if (!o.Image.HasCover || img.Id > 0) &&
o.Image.AutoSplitDoublePage &&
src.Bounds().Dx() > src.Bounds().Dy() {
gifts := epubimagefilters.NewGiftSplitDoublePage(o.Image)
for i, g := range gifts {
part := i + 1
dst := image.NewGray(g.Bounds(src.Bounds()))
g.Draw(dst, src)
imageOutput <- &epubimage.Image{
Id: img.Id,
Part: part,
Data: epubimagedata.New(img.Id, part, dst, o.Image.Quality),
Width: dst.Bounds().Dx(),
Height: dst.Bounds().Dy(),
IsCover: false,
DoublePage: false,
Path: img.Path,
Name: img.Name,
}
} }
} }
} }
@ -188,11 +116,88 @@ func LoadImages(o *Options) ([]*epubimage.Image, error) {
} }
// create a title page with the cover // create a title page with the cover
func LoadCoverTitleData(img *epubimage.Image, title string, quality int) *epubimagedata.ImageData { func CoverTitleData(img image.Image, title string, quality int) *epubimagedata.ImageData {
// Create a blur version of the cover // Create a blur version of the cover
g := gift.New(epubimagefilters.CoverTitle(title)) g := gift.New(epubimagefilters.CoverTitle(title))
dst := image.NewGray(g.Bounds(img.Raw.Bounds())) dst := image.NewGray(g.Bounds(img.Bounds()))
g.Draw(dst, img.Raw) g.Draw(dst, img)
return epubimagedata.NewRaw("OEBPS/Images/title.jpg", dst, quality) return epubimagedata.NewRaw("OEBPS/Images/title.jpg", dst, quality)
} }
// transform image into 1 or 3 images
// only doublepage with autosplit has 3 versions
func TransformImage(src image.Image, srcId int, o *epubimage.Options) []image.Image {
var filters, splitFilter []gift.Filter
var images []image.Image
if o.Crop {
f := epubimagefilters.AutoCrop(
src,
o.CropRatioLeft,
o.CropRatioUp,
o.CropRatioRight,
o.CropRatioBottom,
)
filters = append(filters, f)
splitFilter = append(splitFilter, f)
}
if o.AutoRotate && src.Bounds().Dx() > src.Bounds().Dy() {
filters = append(filters, gift.Rotate90())
}
if o.Contrast != 0 {
f := gift.Contrast(float32(o.Contrast))
filters = append(filters, f)
splitFilter = append(splitFilter, f)
}
if o.Brightness != 0 {
f := gift.Brightness(float32(o.Brightness))
filters = append(filters, f)
splitFilter = append(splitFilter, f)
}
filters = append(filters,
epubimagefilters.Resize(o.ViewWidth, o.ViewHeight, gift.LanczosResampling),
epubimagefilters.Pixel(),
)
// convert
{
g := gift.New(filters...)
dst := image.NewGray(g.Bounds(src.Bounds()))
g.Draw(dst, src)
images = append(images, dst)
}
// auto split off
if !o.AutoSplitDoublePage {
return images
}
// portrait, no need to split
if src.Bounds().Dx() <= src.Bounds().Dy() {
return images
}
// cover
if o.HasCover && srcId == 0 {
return images
}
// convert double page
for _, b := range []bool{o.Manga, !o.Manga} {
g := gift.New(splitFilter...)
g.Add(
epubimagefilters.CropSplitDoublePage(b),
epubimagefilters.Resize(o.ViewWidth, o.ViewHeight, gift.LanczosResampling),
)
dst := image.NewGray(g.Bounds(src.Bounds()))
g.Draw(dst, src)
images = append(images, dst)
}
return images
}

View File

@ -5,20 +5,33 @@ import (
"bytes" "bytes"
"errors" "errors"
"fmt" "fmt"
"image"
_ "image/jpeg"
_ "image/png"
"io" "io"
"io/fs" "io/fs"
"os" "os"
"path/filepath" "path/filepath"
"sort" "sort"
"strings"
"sync"
_ "golang.org/x/image/webp"
epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image" epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
"github.com/celogeek/go-comic-converter/v2/internal/sortpath" "github.com/celogeek/go-comic-converter/v2/internal/sortpath"
"github.com/nwaples/rardecode" "github.com/nwaples/rardecode/v2"
pdfimage "github.com/raff/pdfreader/image" pdfimage "github.com/raff/pdfreader/image"
"github.com/raff/pdfreader/pdfread" "github.com/raff/pdfreader/pdfread"
"golang.org/x/image/tiff"
) )
type tasks struct {
Id int
Image image.Image
Path string
Name string
}
type Options struct { type Options struct {
Input string Input string
SortPathMode int SortPathMode int
@ -30,26 +43,37 @@ type Options struct {
var errNoImagesFound = errors.New("no images found") var errNoImagesFound = errors.New("no images found")
// ensure copy image into a buffer func (o *Options) WorkersRatio(pct int) (nbWorkers int) {
func (o *Options) mustExtractImage(imageOpener func() (io.ReadCloser, error)) *bytes.Buffer { nbWorkers = o.Workers * pct / 100
var b bytes.Buffer if nbWorkers < 1 {
if o.Dry { nbWorkers = 1
return &b }
return
}
// Load images from input
func (o *Options) Load() (totalImages int, output chan *tasks, err error) {
fi, err := os.Stat(o.Input)
if err != nil {
return
} }
f, err := imageOpener() // get all images through a channel of bytes
if err != nil { if fi.IsDir() {
fmt.Fprintln(os.Stderr, err) return o.loadDir()
os.Exit(1) } else {
switch ext := strings.ToLower(filepath.Ext(o.Input)); ext {
case ".cbz", ".zip":
return o.loadCbz()
case ".cbr", ".rar":
return o.loadCbr()
case ".pdf":
return o.loadPdf()
default:
err = fmt.Errorf("unknown file format (%s): support .cbz, .zip, .cbr, .rar, .pdf", ext)
return
}
} }
defer f.Close()
_, err = io.Copy(&b, f)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
return &b
} }
// load a directory of images // load a directory of images
@ -81,25 +105,64 @@ func (o *Options) loadDir() (totalImages int, output chan *tasks, err error) {
sort.Sort(sortpath.By(images, o.SortPathMode)) sort.Sort(sortpath.By(images, o.SortPathMode))
output = make(chan *tasks, o.Workers*2) // Queue all file with id
type job struct {
Id int
Path string
}
jobs := make(chan *job)
go func() { go func() {
defer close(output) defer close(jobs)
for i, img := range images { for i, path := range images {
p, fn := filepath.Split(img) jobs <- &job{i, path}
if p == input {
p = ""
} else {
p = p[len(input)+1:]
}
output <- &tasks{
Id: i,
Reader: o.mustExtractImage(func() (io.ReadCloser, error) { return os.Open(img) }),
Path: p,
Name: fn,
}
} }
}() }()
// read in parallel and get an image
output = make(chan *tasks, o.Workers)
wg := &sync.WaitGroup{}
for j := 0; j < o.WorkersRatio(50); j++ {
wg.Add(1)
go func() {
defer wg.Done()
for job := range jobs {
var img image.Image
if !o.Dry {
f, err := os.Open(job.Path)
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.Path, err)
os.Exit(1)
}
img, _, err = image.Decode(f)
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.Path, err)
os.Exit(1)
}
f.Close()
}
p, fn := filepath.Split(job.Path)
if p == input {
p = ""
} else {
p = p[len(input)+1:]
}
output <- &tasks{
Id: job.Id,
Image: img,
Path: p,
Name: fn,
}
}
}()
}
// wait all done and close
go func() {
wg.Wait()
close(output)
}()
return return
} }
@ -136,50 +199,76 @@ func (o *Options) loadCbz() (totalImages int, output chan *tasks, err error) {
indexedNames[name] = i indexedNames[name] = i
} }
output = make(chan *tasks, o.Workers*2) type job struct {
Id int
F *zip.File
}
jobs := make(chan *job)
go func() { go func() {
defer close(output) defer close(jobs)
defer r.Close()
for _, img := range images { for _, img := range images {
p, fn := filepath.Split(filepath.Clean(img.Name)) jobs <- &job{indexedNames[img.Name], img}
output <- &tasks{
Id: indexedNames[img.Name],
Reader: o.mustExtractImage(img.Open),
Path: p,
Name: fn,
}
} }
}() }()
output = make(chan *tasks, o.Workers)
wg := &sync.WaitGroup{}
for j := 0; j < o.WorkersRatio(50); j++ {
wg.Add(1)
go func() {
defer wg.Done()
for job := range jobs {
var img image.Image
if !o.Dry {
f, err := job.F.Open()
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.F.Name, err)
os.Exit(1)
}
img, _, err = image.Decode(f)
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.F.Name, err)
os.Exit(1)
}
f.Close()
}
p, fn := filepath.Split(filepath.Clean(job.F.Name))
output <- &tasks{
Id: job.Id,
Image: img,
Path: p,
Name: fn,
}
}
}()
}
go func() {
wg.Wait()
close(output)
r.Close()
}()
return return
} }
// load a rar file that include images // load a rar file that include images
func (o *Options) loadCbr() (totalImages int, output chan *tasks, err error) { func (o *Options) loadCbr() (totalImages int, output chan *tasks, err error) {
// listing and indexing var isSolid bool
rl, err := rardecode.OpenReader(o.Input, "") files, err := rardecode.List(o.Input)
if err != nil { if err != nil {
return return
} }
names := make([]string, 0) names := make([]string, 0)
for { for _, f := range files {
f, ferr := rl.Next()
if ferr != nil && ferr != io.EOF {
rl.Close()
err = ferr
return
}
if f == nil {
break
}
if !f.IsDir && isSupportedImage(f.Name) { if !f.IsDir && isSupportedImage(f.Name) {
if f.Solid {
isSolid = true
}
names = append(names, f.Name) names = append(names, f.Name)
} }
} }
rl.Close()
totalImages = len(names) totalImages = len(names)
if totalImages == 0 { if totalImages == 0 {
@ -194,46 +283,89 @@ func (o *Options) loadCbr() (totalImages int, output chan *tasks, err error) {
indexedNames[name] = i indexedNames[name] = i
} }
// send file to the queue type job struct {
output = make(chan *tasks, o.Workers*2) Id int
Name string
Open func() (io.ReadCloser, error)
}
jobs := make(chan *job)
go func() { go func() {
defer close(output) defer close(jobs)
r, err := rardecode.OpenReader(o.Input, "") if isSolid && !o.Dry {
if err != nil { r, rerr := rardecode.OpenReader(o.Input)
fmt.Fprintln(os.Stderr, err) if rerr != nil {
os.Exit(1) fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", o.Input, rerr)
}
defer r.Close()
for {
f, err := r.Next()
if err != nil && err != io.EOF {
fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
} }
if f == nil { defer r.Close()
break for {
} f, rerr := r.Next()
if idx, ok := indexedNames[f.Name]; ok { if rerr != nil {
var b bytes.Buffer if rerr == io.EOF {
if !o.Dry { break
io.Copy(&b, r) }
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", f.Name, rerr)
os.Exit(1)
} }
if i, ok := indexedNames[f.Name]; ok {
p, fn := filepath.Split(filepath.Clean(f.Name)) var b bytes.Buffer
_, rerr = io.Copy(&b, r)
output <- &tasks{ if rerr != nil {
Id: idx, fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", f.Name, rerr)
Reader: &b, os.Exit(1)
Path: p, }
Name: fn, jobs <- &job{i, f.Name, func() (io.ReadCloser, error) {
return io.NopCloser(bytes.NewReader(b.Bytes())), nil
}}
}
}
} else {
for _, img := range files {
if i, ok := indexedNames[img.Name]; ok {
jobs <- &job{i, img.Name, img.Open}
} }
} }
} }
}() }()
// send file to the queue
output = make(chan *tasks, o.Workers)
wg := &sync.WaitGroup{}
for j := 0; j < o.WorkersRatio(50); j++ {
wg.Add(1)
go func() {
defer wg.Done()
for job := range jobs {
var img image.Image
if !o.Dry {
f, err := job.Open()
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.Name, err)
os.Exit(1)
}
img, _, err = image.Decode(f)
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror processing image %s: %s\n", job.Name, err)
os.Exit(1)
}
f.Close()
}
p, fn := filepath.Split(filepath.Clean(job.Name))
output <- &tasks{
Id: job.Id,
Image: img,
Path: p,
Name: fn,
}
}
}()
}
go func() {
wg.Wait()
close(output)
}()
return return
} }
@ -252,16 +384,9 @@ func (o *Options) loadPdf() (totalImages int, output chan *tasks, err error) {
defer close(output) defer close(output)
defer pdf.Close() defer pdf.Close()
for i := 0; i < totalImages; i++ { for i := 0; i < totalImages; i++ {
var b bytes.Buffer var img image.Image
if !o.Dry { if !o.Dry {
img, err := pdfimage.Extract(pdf, i+1) img, err = pdfimage.Extract(pdf, i+1)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
err = tiff.Encode(&b, img, nil)
if err != nil { if err != nil {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
@ -269,10 +394,10 @@ func (o *Options) loadPdf() (totalImages int, output chan *tasks, err error) {
} }
output <- &tasks{ output <- &tasks{
Id: i, Id: i,
Reader: &b, Image: img,
Path: "", Path: "",
Name: fmt.Sprintf(pageFmt, i+1), Name: fmt.Sprintf(pageFmt, i+1),
} }
} }
}() }()