Mirror of https://github.com/celogeek/go-comic-converter.git (synced 2025-05-25 00:02:37 +02:00)

commit b340305f33: move image processing
parent 58ed44c28d
@@ -11,6 +11,8 @@ import (
 	"time"
 
 	epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
+	epubimageprocessing "github.com/celogeek/go-comic-converter/v2/internal/epub/image_processing"
+	epubprogress "github.com/celogeek/go-comic-converter/v2/internal/epub/progress"
 	epubtemplates "github.com/celogeek/go-comic-converter/v2/internal/epub/templates"
 	epubzip "github.com/celogeek/go-comic-converter/v2/internal/epub/zip"
 	"github.com/gofrs/uuid"
@@ -48,7 +50,8 @@ type epubPart struct {
 func New(options *Options) *ePub {
 	uid, err := uuid.NewV4()
 	if err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 
 	tmpl := template.New("parser")
@@ -69,11 +72,13 @@ func New(options *Options) *ePub {
 func (e *ePub) render(templateString string, data any) string {
 	tmpl, err := e.templateProcessor.Parse(templateString)
 	if err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 	result := &strings.Builder{}
 	if err := tmpl.Execute(result, data); err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 
 	stripBlank := regexp.MustCompile("\n+")
@@ -110,7 +115,14 @@ func (e *ePub) writeBlank(wz *epubzip.EpubZip, img *epubimage.Image) error {
 }
 
 func (e *ePub) getParts() ([]*epubPart, error) {
-	images, err := e.LoadImages()
+	images, err := epubimageprocessing.LoadImages(&epubimageprocessing.Options{
+		Input:        e.Input,
+		SortPathMode: e.SortPathMode,
+		Quiet:        e.Quiet,
+		Dry:          e.Dry,
+		Workers:      e.Workers,
+		Image:        e.Image,
+	})
 
 	if err != nil {
 		return nil, err
@@ -205,7 +217,12 @@ func (e *ePub) Write() error {
 
 	totalParts := len(epubParts)
 
-	bar := e.NewBar(totalParts, "Writing Part", 2, 2)
+	bar := epubprogress.New(epubprogress.Options{
+		Max:         totalParts,
+		Description: "Writing Part",
+		CurrentJob:  2,
+		TotalJob:    2,
+	})
 	for i, part := range epubParts {
 		ext := filepath.Ext(e.Output)
 		suffix := ""
@@ -252,7 +269,7 @@ func (e *ePub) Write() error {
 				return err
 			}
 		}
-		if err := wz.WriteImage(e.coverTitleImageData(title, part.Cover, i+1, totalParts)); err != nil {
+		if err := wz.WriteImage(epubimageprocessing.LoadCoverData(part.Cover, title, e.Image.Quality)); err != nil {
 			return err
 		}
 
@@ -1,464 +0,0 @@
package epub

import (
	"archive/zip"
	"bytes"
	"fmt"
	"image"
	"image/color"
	_ "image/jpeg"
	_ "image/png"
	"io"
	"io/fs"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"

	epubfilters "github.com/celogeek/go-comic-converter/v2/internal/epub/filters"
	epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
	epubimagedata "github.com/celogeek/go-comic-converter/v2/internal/epub/imagedata"
	"github.com/celogeek/go-comic-converter/v2/internal/sortpath"
	"github.com/disintegration/gift"
	"github.com/nwaples/rardecode"
	pdfimage "github.com/raff/pdfreader/image"
	"github.com/raff/pdfreader/pdfread"
	"golang.org/x/image/tiff"
	_ "golang.org/x/image/webp"
)

type imageTask struct {
	Id     int
	Reader io.ReadCloser
	Path   string
	Name   string
}

func colorIsBlank(c color.Color) bool {
	g := color.GrayModel.Convert(c).(color.Gray)
	return g.Y >= 0xf0
}

func findMarging(img image.Image) image.Rectangle {
	imgArea := img.Bounds()

LEFT:
	for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
		for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
			if !colorIsBlank(img.At(x, y)) {
				break LEFT
			}
		}
		imgArea.Min.X++
	}

UP:
	for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
		for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
			if !colorIsBlank(img.At(x, y)) {
				break UP
			}
		}
		imgArea.Min.Y++
	}

RIGHT:
	for x := imgArea.Max.X - 1; x >= imgArea.Min.X; x-- {
		for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
			if !colorIsBlank(img.At(x, y)) {
				break RIGHT
			}
		}
		imgArea.Max.X--
	}

BOTTOM:
	for y := imgArea.Max.Y - 1; y >= imgArea.Min.Y; y-- {
		for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
			if !colorIsBlank(img.At(x, y)) {
				break BOTTOM
			}
		}
		imgArea.Max.Y--
	}

	return imgArea
}

func (e *ePub) LoadImages() ([]*epubimage.Image, error) {
	images := make([]*epubimage.Image, 0)

	fi, err := os.Stat(e.Input)
	if err != nil {
		return nil, err
	}

	var (
		imageCount int
		imageInput chan *imageTask
	)

	if fi.IsDir() {
		imageCount, imageInput, err = loadDir(e.Input, e.SortPathMode)
	} else {
		switch ext := strings.ToLower(filepath.Ext(e.Input)); ext {
		case ".cbz", ".zip":
			imageCount, imageInput, err = loadCbz(e.Input, e.SortPathMode)
		case ".cbr", ".rar":
			imageCount, imageInput, err = loadCbr(e.Input, e.SortPathMode)
		case ".pdf":
			imageCount, imageInput, err = loadPdf(e.Input)
		default:
			err = fmt.Errorf("unknown file format (%s): support .cbz, .zip, .cbr, .rar, .pdf", ext)
		}
	}
	if err != nil {
		return nil, err
	}

	if e.Dry {
		for img := range imageInput {
			img.Reader.Close()
			images = append(images, &epubimage.Image{
				Id:   img.Id,
				Path: img.Path,
				Name: img.Name,
			})
		}

		return images, nil
	}

	imageOutput := make(chan *epubimage.Image)

	// processing
	bar := e.NewBar(imageCount, "Processing", 1, 2)
	wg := &sync.WaitGroup{}

	for i := 0; i < e.Workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()

			for img := range imageInput {
				// Decode image
				src, _, err := image.Decode(img.Reader)
				img.Reader.Close()
				if err != nil {
					bar.Clear()
					fmt.Fprintf(os.Stderr, "error processing image %s%s: %s\n", img.Path, img.Name, err)
					os.Exit(1)
				}

				if e.Image.Crop {
					g := gift.New(gift.Crop(findMarging(src)))
					newSrc := image.NewNRGBA(g.Bounds(src.Bounds()))
					g.Draw(newSrc, src)
					src = newSrc
				}

				g := epubimage.NewGift(e.Options.Image)

				// Convert image
				dst := image.NewGray(g.Bounds(src.Bounds()))
				g.Draw(dst, src)

				var raw image.Image
				if img.Id == 0 {
					raw = dst
				}

				imageOutput <- &epubimage.Image{
					Id:      img.Id,
					Part:    0,
					Raw:     raw,
					Data:    epubimagedata.New(img.Id, 0, dst, e.Image.Quality),
					Width:   dst.Bounds().Dx(),
					Height:  dst.Bounds().Dy(),
					IsCover: img.Id == 0,
					DoublePage: src.Bounds().Dx() > src.Bounds().Dy() &&
						src.Bounds().Dx() > e.Image.ViewHeight &&
						src.Bounds().Dy() > e.Image.ViewWidth,
					Path: img.Path,
					Name: img.Name,
				}

				// Auto split double page
				// Except for cover
				// Only if the src image have width > height and is bigger than the view
				if (!e.Image.HasCover || img.Id > 0) &&
					e.Image.AutoSplitDoublePage &&
					src.Bounds().Dx() > src.Bounds().Dy() &&
					src.Bounds().Dx() > e.Image.ViewHeight &&
					src.Bounds().Dy() > e.Image.ViewWidth {
					gifts := epubimage.NewGiftSplitDoublePage(e.Options.Image)
					for i, g := range gifts {
						part := i + 1
						dst := image.NewGray(g.Bounds(src.Bounds()))
						g.Draw(dst, src)

						imageOutput <- &epubimage.Image{
							Id:         img.Id,
							Part:       part,
							Data:       epubimagedata.New(img.Id, part, dst, e.Image.Quality),
							Width:      dst.Bounds().Dx(),
							Height:     dst.Bounds().Dy(),
							IsCover:    false,
							DoublePage: false,
							Path:       img.Path,
							Name:       img.Name,
						}
					}
				}
			}
		}()
	}

	go func() {
		wg.Wait()
		close(imageOutput)
	}()

	for img := range imageOutput {
		if !(e.Image.NoBlankPage && img.Width == 1 && img.Height == 1) {
			images = append(images, img)
		}
		if img.Part == 0 {
			bar.Add(1)
		}
	}
	bar.Close()

	if len(images) == 0 {
		return nil, fmt.Errorf("image not found")
	}

	return images, nil
}

func isSupportedImage(path string) bool {
	switch strings.ToLower(filepath.Ext(path)) {
	case ".jpg", ".jpeg", ".png", ".webp":
		{
			return true
		}
	}
	return false
}

func loadDir(input string, sortpathmode int) (int, chan *imageTask, error) {
	images := make([]string, 0)
	input = filepath.Clean(input)
	err := filepath.WalkDir(input, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !d.IsDir() && isSupportedImage(path) {
			images = append(images, path)
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if len(images) == 0 {
		return 0, nil, fmt.Errorf("image not found")
	}

	sort.Sort(sortpath.By(images, sortpathmode))

	output := make(chan *imageTask)
	go func() {
		defer close(output)
		for i, img := range images {
			f, err := os.Open(img)
			if err != nil {
				fmt.Fprintln(os.Stderr, err)
				os.Exit(1)
			}

			p, fn := filepath.Split(img)
			if p == input {
				p = ""
			} else {
				p = p[len(input)+1:]
			}
			output <- &imageTask{
				Id:     i,
				Reader: f,
				Path:   p,
				Name:   fn,
			}
		}
	}()
	return len(images), output, nil
}

func loadCbz(input string, sortpathmode int) (int, chan *imageTask, error) {
	r, err := zip.OpenReader(input)
	if err != nil {
		return 0, nil, err
	}

	images := make([]*zip.File, 0)
	for _, f := range r.File {
		if !f.FileInfo().IsDir() && isSupportedImage(f.Name) {
			images = append(images, f)
		}
	}
	if len(images) == 0 {
		r.Close()
		return 0, nil, fmt.Errorf("no images found")
	}

	names := []string{}
	for _, img := range images {
		names = append(names, img.Name)
	}
	sort.Sort(sortpath.By(names, sortpathmode))

	indexedNames := make(map[string]int)
	for i, name := range names {
		indexedNames[name] = i
	}

	output := make(chan *imageTask)
	go func() {
		defer close(output)
		for _, img := range images {
			f, err := img.Open()
			if err != nil {
				fmt.Fprintln(os.Stderr, err)
				os.Exit(1)
			}
			p, fn := filepath.Split(filepath.Clean(img.Name))
			output <- &imageTask{
				Id:     indexedNames[img.Name],
				Reader: f,
				Path:   p,
				Name:   fn,
			}
		}
	}()
	return len(images), output, nil
}

func loadCbr(input string, sortpathmode int) (int, chan *imageTask, error) {
	// listing and indexing
	rl, err := rardecode.OpenReader(input, "")
	if err != nil {
		return 0, nil, err
	}
	names := make([]string, 0)
	for {
		f, err := rl.Next()

		if err != nil && err != io.EOF {
			rl.Close()
			return 0, nil, err
		}

		if f == nil {
			break
		}

		if !f.IsDir && isSupportedImage(f.Name) {
			names = append(names, f.Name)
		}
	}
	rl.Close()

	if len(names) == 0 {
		return 0, nil, fmt.Errorf("no images found")
	}

	sort.Sort(sortpath.By(names, sortpathmode))

	indexedNames := make(map[string]int)
	for i, name := range names {
		indexedNames[name] = i
	}

	// send file to the queue
	output := make(chan *imageTask)
	go func() {
		defer close(output)
		r, err := rardecode.OpenReader(input, "")
		if err != nil {
			panic(err)
		}
		defer r.Close()

		for {
			f, err := r.Next()
			if err != nil && err != io.EOF {
				panic(err)
			}
			if f == nil {
				break
			}
			if idx, ok := indexedNames[f.Name]; ok {
				b := bytes.NewBuffer([]byte{})
				io.Copy(b, r)

				p, fn := filepath.Split(filepath.Clean(f.Name))

				output <- &imageTask{
					Id:     idx,
					Reader: io.NopCloser(b),
					Path:   p,
					Name:   fn,
				}
			}
		}
	}()

	return len(names), output, nil
}

func loadPdf(input string) (int, chan *imageTask, error) {
	pdf := pdfread.Load(input)
	if pdf == nil {
		return 0, nil, fmt.Errorf("can't read pdf")
	}

	nbPages := len(pdf.Pages())
	pageFmt := fmt.Sprintf("page %%0%dd", len(fmt.Sprintf("%d", nbPages)))
	output := make(chan *imageTask)
	go func() {
		defer close(output)
		defer pdf.Close()
		for i := 0; i < nbPages; i++ {
			img, err := pdfimage.Extract(pdf, i+1)
			if err != nil {
				panic(err)
			}

			b := bytes.NewBuffer([]byte{})
			err = tiff.Encode(b, img, nil)
			if err != nil {
				panic(err)
			}

			output <- &imageTask{
				Id:     i,
				Reader: io.NopCloser(b),
				Path:   "",
				Name:   fmt.Sprintf(pageFmt, i+1),
			}
		}
	}()

	return nbPages, output, nil
}

func (e *ePub) coverTitleImageData(title string, img *epubimage.Image, currentPart, totalPart int) *epubimagedata.ImageData {
	// Create a blur version of the cover
	g := gift.New(epubfilters.CoverTitle(title))
	dst := image.NewGray(g.Bounds(img.Raw.Bounds()))
	g.Draw(dst, img.Raw)

	return epubimagedata.NewRaw("OEBPS/Images/title.jpg", dst, e.Image.Quality)
}
internal/epub/image_processing/epub_image_processing.go (new file, 192 lines)
@@ -0,0 +1,192 @@
package epubimageprocessing

import (
	"fmt"
	"image"
	_ "image/jpeg"
	_ "image/png"
	"io"
	"os"
	"path/filepath"
	"strings"
	"sync"

	epubfilters "github.com/celogeek/go-comic-converter/v2/internal/epub/filters"
	epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
	epubimagedata "github.com/celogeek/go-comic-converter/v2/internal/epub/imagedata"
	epubprogress "github.com/celogeek/go-comic-converter/v2/internal/epub/progress"
	"github.com/disintegration/gift"
	_ "golang.org/x/image/webp"
)

type tasks struct {
	Id     int
	Reader io.Reader
	Path   string
	Name   string
}

func LoadImages(o *Options) ([]*epubimage.Image, error) {
	images := make([]*epubimage.Image, 0)

	fi, err := os.Stat(o.Input)
	if err != nil {
		return nil, err
	}

	var (
		imageCount int
		imageInput chan *tasks
	)

	if fi.IsDir() {
		imageCount, imageInput, err = o.loadDir()
	} else {
		switch ext := strings.ToLower(filepath.Ext(o.Input)); ext {
		case ".cbz", ".zip":
			imageCount, imageInput, err = o.loadCbz()
		case ".cbr", ".rar":
			imageCount, imageInput, err = o.loadCbr()
		case ".pdf":
			imageCount, imageInput, err = o.loadPdf()
		default:
			err = fmt.Errorf("unknown file format (%s): support .cbz, .zip, .cbr, .rar, .pdf", ext)
		}
	}
	if err != nil {
		return nil, err
	}

	if o.Dry {
		for img := range imageInput {
			images = append(images, &epubimage.Image{
				Id:   img.Id,
				Path: img.Path,
				Name: img.Name,
			})
		}

		return images, nil
	}

	imageOutput := make(chan *epubimage.Image)

	// processing
	bar := epubprogress.New(epubprogress.Options{
		Quiet:       o.Quiet,
		Max:         imageCount,
		Description: "Processing",
		CurrentJob:  1,
		TotalJob:    2,
	})
	wg := &sync.WaitGroup{}

	for i := 0; i < o.Workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()

			for img := range imageInput {
				// Decode image
				src, _, err := image.Decode(img.Reader)
				if err != nil {
					bar.Clear()
					fmt.Fprintf(os.Stderr, "error processing image %s%s: %s\n", img.Path, img.Name, err)
					os.Exit(1)
				}

				if o.Image.Crop {
					g := gift.New(gift.Crop(findMarging(src)))
					newSrc := image.NewNRGBA(g.Bounds(src.Bounds()))
					g.Draw(newSrc, src)
					src = newSrc
				}

				g := epubimage.NewGift(o.Image)

				// Convert image
				dst := image.NewGray(g.Bounds(src.Bounds()))
				g.Draw(dst, src)

				var raw image.Image
				if img.Id == 0 {
					raw = dst
				}

				imageOutput <- &epubimage.Image{
					Id:      img.Id,
					Part:    0,
					Raw:     raw,
					Data:    epubimagedata.New(img.Id, 0, dst, o.Image.Quality),
					Width:   dst.Bounds().Dx(),
					Height:  dst.Bounds().Dy(),
					IsCover: img.Id == 0,
					DoublePage: src.Bounds().Dx() > src.Bounds().Dy() &&
						src.Bounds().Dx() > o.Image.ViewHeight &&
						src.Bounds().Dy() > o.Image.ViewWidth,
					Path: img.Path,
					Name: img.Name,
				}

				// Auto split double page
				// Except for cover
				// Only if the src image have width > height and is bigger than the view
				if (!o.Image.HasCover || img.Id > 0) &&
					o.Image.AutoSplitDoublePage &&
					src.Bounds().Dx() > src.Bounds().Dy() &&
					src.Bounds().Dx() > o.Image.ViewHeight &&
					src.Bounds().Dy() > o.Image.ViewWidth {
					gifts := epubimage.NewGiftSplitDoublePage(o.Image)
					for i, g := range gifts {
						part := i + 1
						dst := image.NewGray(g.Bounds(src.Bounds()))
						g.Draw(dst, src)

						imageOutput <- &epubimage.Image{
							Id:         img.Id,
							Part:       part,
							Data:       epubimagedata.New(img.Id, part, dst, o.Image.Quality),
							Width:      dst.Bounds().Dx(),
							Height:     dst.Bounds().Dy(),
							IsCover:    false,
							DoublePage: false,
							Path:       img.Path,
							Name:       img.Name,
						}
					}
				}
			}
		}()
	}

	go func() {
		wg.Wait()
		close(imageOutput)
	}()

	for img := range imageOutput {
		if img.Part == 0 {
			bar.Add(1)
		}
		if o.Image.NoBlankPage && img.Width == 1 && img.Height == 1 {
			continue
		}
		images = append(images, img)
	}
	bar.Close()

	if len(images) == 0 {
		return nil, fmt.Errorf("image not found")
	}

	return images, nil
}

func LoadCoverData(img *epubimage.Image, title string, quality int) *epubimagedata.ImageData {
	// Create a blur version of the cover
	g := gift.New(epubfilters.CoverTitle(title))
	dst := image.NewGray(g.Bounds(img.Raw.Bounds()))
	g.Draw(dst, img.Raw)

	return epubimagedata.NewRaw("OEBPS/Images/title.jpg", dst, quality)
}
@@ -0,0 +1,69 @@
package epubimageprocessing

import (
	"image"
	"image/color"
	"path/filepath"
	"strings"
)

func isSupportedImage(path string) bool {
	switch strings.ToLower(filepath.Ext(path)) {
	case ".jpg", ".jpeg", ".png", ".webp":
		{
			return true
		}
	}
	return false
}

func colorIsBlank(c color.Color) bool {
	g := color.GrayModel.Convert(c).(color.Gray)
	return g.Y >= 0xf0
}

func findMarging(img image.Image) image.Rectangle {
	imgArea := img.Bounds()

LEFT:
	for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
		for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
			if !colorIsBlank(img.At(x, y)) {
				break LEFT
			}
		}
		imgArea.Min.X++
	}

UP:
	for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
		for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
			if !colorIsBlank(img.At(x, y)) {
				break UP
			}
		}
		imgArea.Min.Y++
	}

RIGHT:
	for x := imgArea.Max.X - 1; x >= imgArea.Min.X; x-- {
		for y := imgArea.Min.Y; y < imgArea.Max.Y; y++ {
			if !colorIsBlank(img.At(x, y)) {
				break RIGHT
			}
		}
		imgArea.Max.X--
	}

BOTTOM:
	for y := imgArea.Max.Y - 1; y >= imgArea.Min.Y; y-- {
		for x := imgArea.Min.X; x < imgArea.Max.X; x++ {
			if !colorIsBlank(img.At(x, y)) {
				break BOTTOM
			}
		}
		imgArea.Max.Y--
	}

	return imgArea
}
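An editorial illustration, not part of the commit: a minimal test sketch for the blank-margin detection above, assuming it sits next to the helpers in package epubimageprocessing (the test name and the image values are hypothetical).

package epubimageprocessing

import (
	"image"
	"image/color"
	"testing"
)

// TestFindMarging builds a near-white 100x100 page with a dark 20x30 block at
// (40,50) and checks that findMarging shrinks the bounds to exactly that block.
func TestFindMarging(t *testing.T) {
	img := image.NewGray(image.Rect(0, 0, 100, 100))
	for y := 0; y < 100; y++ {
		for x := 0; x < 100; x++ {
			img.SetGray(x, y, color.Gray{Y: 0xff}) // blank: colorIsBlank needs Y >= 0xf0
		}
	}
	for y := 50; y < 80; y++ {
		for x := 40; x < 60; x++ {
			img.SetGray(x, y, color.Gray{Y: 0x00}) // content pixels
		}
	}

	if got, want := findMarging(img), image.Rect(40, 50, 60, 80); got != want {
		t.Fatalf("findMarging = %v, want %v", got, want)
	}
}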
internal/epub/image_processing/epub_image_processing_loader.go (new file, 271 lines)
@@ -0,0 +1,271 @@
package epubimageprocessing

import (
	"archive/zip"
	"bytes"
	"fmt"
	"io"
	"io/fs"
	"os"
	"path/filepath"
	"sort"

	epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
	"github.com/celogeek/go-comic-converter/v2/internal/sortpath"
	"github.com/nwaples/rardecode"
	pdfimage "github.com/raff/pdfreader/image"
	"github.com/raff/pdfreader/pdfread"
	"golang.org/x/image/tiff"
)

type Options struct {
	Input        string
	SortPathMode int
	Quiet        bool
	Dry          bool
	Workers      int
	Image        *epubimage.Options
}

func (o *Options) mustExtractImage(imageOpener func() (io.ReadCloser, error)) *bytes.Buffer {
	if o.Dry {
		return &bytes.Buffer{}
	}
	f, err := imageOpener()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()
	var b bytes.Buffer
	_, err = io.Copy(&b, f)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	return &b
}

func (o *Options) loadDir() (totalImages int, output chan *tasks, err error) {
	images := make([]string, 0)

	input := filepath.Clean(o.Input)
	err = filepath.WalkDir(input, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !d.IsDir() && isSupportedImage(path) {
			images = append(images, path)
		}
		return nil
	})

	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	totalImages = len(images)

	if totalImages == 0 {
		err = fmt.Errorf("image not found")
		return
	}

	sort.Sort(sortpath.By(images, o.SortPathMode))

	output = make(chan *tasks, o.Workers*2)
	go func() {
		defer close(output)
		for i, img := range images {
			p, fn := filepath.Split(img)
			if p == input {
				p = ""
			} else {
				p = p[len(input)+1:]
			}
			output <- &tasks{
				Id:     i,
				Reader: o.mustExtractImage(func() (io.ReadCloser, error) { return os.Open(img) }),
				Path:   p,
				Name:   fn,
			}
		}
	}()

	return
}

func (o *Options) loadCbz() (totalImages int, output chan *tasks, err error) {
	r, err := zip.OpenReader(o.Input)
	if err != nil {
		return
	}

	images := make([]*zip.File, 0)
	for _, f := range r.File {
		if !f.FileInfo().IsDir() && isSupportedImage(f.Name) {
			images = append(images, f)
		}
	}

	totalImages = len(images)

	if totalImages == 0 {
		r.Close()
		err = fmt.Errorf("no images found")
		return
	}

	names := []string{}
	for _, img := range images {
		names = append(names, img.Name)
	}
	sort.Sort(sortpath.By(names, o.SortPathMode))

	indexedNames := make(map[string]int)
	for i, name := range names {
		indexedNames[name] = i
	}

	output = make(chan *tasks, o.Workers*2)
	go func() {
		defer close(output)
		defer r.Close()
		for _, img := range images {
			p, fn := filepath.Split(filepath.Clean(img.Name))
			output <- &tasks{
				Id:     indexedNames[img.Name],
				Reader: o.mustExtractImage(img.Open),
				Path:   p,
				Name:   fn,
			}
		}
	}()
	return
}

func (o *Options) loadCbr() (totalImages int, output chan *tasks, err error) {
	// listing and indexing
	rl, err := rardecode.OpenReader(o.Input, "")
	if err != nil {
		return
	}

	names := make([]string, 0)
	for {
		f, ferr := rl.Next()

		if ferr != nil && ferr != io.EOF {
			rl.Close()
			err = ferr
			return
		}

		if f == nil {
			break
		}

		if !f.IsDir && isSupportedImage(f.Name) {
			names = append(names, f.Name)
		}
	}
	rl.Close()

	totalImages = len(names)
	if totalImages == 0 {
		err = fmt.Errorf("no images found")
		return
	}

	sort.Sort(sortpath.By(names, o.SortPathMode))

	indexedNames := make(map[string]int)
	for i, name := range names {
		indexedNames[name] = i
	}

	// send file to the queue
	output = make(chan *tasks, o.Workers*2)
	go func() {
		defer close(output)
		r, err := rardecode.OpenReader(o.Input, "")
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)

		}
		defer r.Close()

		for {
			f, err := r.Next()
			if err != nil && err != io.EOF {
				fmt.Fprintln(os.Stderr, err)
				os.Exit(1)

			}
			if f == nil {
				break
			}
			if idx, ok := indexedNames[f.Name]; ok {
				var b bytes.Buffer
				if !o.Dry {
					io.Copy(&b, r)
				}

				p, fn := filepath.Split(filepath.Clean(f.Name))

				output <- &tasks{
					Id:     idx,
					Reader: &b,
					Path:   p,
					Name:   fn,
				}
			}
		}
	}()

	return
}

func (o *Options) loadPdf() (totalImages int, output chan *tasks, err error) {
	pdf := pdfread.Load(o.Input)
	if pdf == nil {
		err = fmt.Errorf("can't read pdf")
		return
	}

	totalImages = len(pdf.Pages())
	pageFmt := fmt.Sprintf("page %%0%dd", len(fmt.Sprintf("%d", totalImages)))
	output = make(chan *tasks)
	go func() {
		defer close(output)
		defer pdf.Close()
		for i := 0; i < totalImages; i++ {
			var b bytes.Buffer

			if !o.Dry {
				img, err := pdfimage.Extract(pdf, i+1)
				if err != nil {
					fmt.Fprintln(os.Stderr, err)
					os.Exit(1)
				}

				err = tiff.Encode(&b, img, nil)
				if err != nil {
					fmt.Fprintln(os.Stderr, err)
					os.Exit(1)
				}
			}

			output <- &tasks{
				Id:     i,
				Reader: &b,
				Path:   "",
				Name:   fmt.Sprintf(pageFmt, i+1),
			}
		}
	}()

	return
}
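For orientation, an illustrative sketch (not part of the commit) of how the new package is driven, mirroring the getParts and Write hunks earlier in this diff. The package name, wrapper function, and literal values are invented for the example, and since these are internal packages the snippet only builds from inside this module.

// Hypothetical example package; the real caller is (*ePub).getParts in the epub package.
package example

import (
	"fmt"
	"os"

	epubimage "github.com/celogeek/go-comic-converter/v2/internal/epub/image"
	epubimageprocessing "github.com/celogeek/go-comic-converter/v2/internal/epub/image_processing"
)

// LoadComic wires the Options struct the same way the getParts hunk does,
// then locates the cover to build the blurred title page.
func LoadComic(input string, workers int, imgOpts *epubimage.Options) error {
	images, err := epubimageprocessing.LoadImages(&epubimageprocessing.Options{
		Input:        input,
		SortPathMode: 0, // forwarded to sortpath.By
		Quiet:        true,
		Dry:          false,
		Workers:      workers,
		Image:        imgOpts,
	})
	if err != nil {
		return err
	}

	// Only the image processed with Id 0 keeps its Raw pixels, so it is the
	// one LoadCoverData can turn into OEBPS/Images/title.jpg.
	for _, img := range images {
		if img.IsCover {
			_ = epubimageprocessing.LoadCoverData(img, "My Comic", imgOpts.Quality)
			break
		}
	}
	fmt.Fprintf(os.Stderr, "loaded %d images\n", len(images))
	return nil
}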
@@ -8,6 +8,7 @@ import (
 	"hash/crc32"
 	"image"
 	"image/jpeg"
+	"os"
 	"time"
 )
 
@@ -28,18 +29,21 @@ func New(id int, part int, img image.Image, quality int) *ImageData {
 func NewRaw(name string, img image.Image, quality int) *ImageData {
 	data := bytes.NewBuffer([]byte{})
 	if err := jpeg.Encode(data, img, &jpeg.Options{Quality: quality}); err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 
 	cdata := bytes.NewBuffer([]byte{})
 	wcdata, err := flate.NewWriter(cdata, flate.BestCompression)
 	if err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 	wcdata.Write(data.Bytes())
 	wcdata.Close()
 	if err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
 	}
 	t := time.Now()
 	return &ImageData{
@@ -1,4 +1,4 @@
-package epub
+package epubprogress
 
 import (
 	"fmt"
@@ -7,18 +7,26 @@ import (
 	"github.com/schollz/progressbar/v3"
 )
 
-func (e *ePub) NewBar(max int, description string, currentJob, totalJob int) *progressbar.ProgressBar {
-	if e.Quiet {
-		return progressbar.DefaultSilent(int64(max))
+type Options struct {
+	Quiet       bool
+	Max         int
+	Description string
+	CurrentJob  int
+	TotalJob    int
+}
+
+func New(o Options) *progressbar.ProgressBar {
+	if o.Quiet {
+		return progressbar.DefaultSilent(int64(o.Max))
 	}
-	fmtJob := fmt.Sprintf("%%0%dd", len(fmt.Sprint(totalJob)))
+	fmtJob := fmt.Sprintf("%%0%dd", len(fmt.Sprint(o.TotalJob)))
 	fmtDesc := fmt.Sprintf("[%s/%s] %%-15s", fmtJob, fmtJob)
-	return progressbar.NewOptions(max,
+	return progressbar.NewOptions(o.Max,
 		progressbar.OptionSetWriter(os.Stderr),
 		progressbar.OptionOnCompletion(func() {
 			fmt.Fprint(os.Stderr, "\n")
 		}),
-		progressbar.OptionSetDescription(fmt.Sprintf(fmtDesc, currentJob, totalJob, description)),
+		progressbar.OptionSetDescription(fmt.Sprintf(fmtDesc, o.CurrentJob, o.TotalJob, o.Description)),
 		progressbar.OptionSetWidth(60),
 		progressbar.OptionShowCount(),
 		progressbar.OptionSetRenderBlankState(true),
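Likewise illustrative and not part of the commit: the refactor turns the old ePub.NewBar method into a free constructor, so any package can build the bar from an Options value, as epub.go and the image-processing package now do. A sketch with a hypothetical package, wrapper, and values:

// Hypothetical example package showing the new constructor-style API.
package example

import (
	epubprogress "github.com/celogeek/go-comic-converter/v2/internal/epub/progress"
)

// WriteParts mirrors the "Writing Part" bar set up in (*ePub).Write.
func WriteParts(totalParts int, quiet bool) {
	bar := epubprogress.New(epubprogress.Options{
		Quiet:       quiet,
		Max:         totalParts,
		Description: "Writing Part",
		CurrentJob:  2, // second of the two jobs (1 = Processing, 2 = Writing)
		TotalJob:    2,
	})
	for i := 0; i < totalParts; i++ {
		// ... write part i ...
		bar.Add(1)
	}
	bar.Close()
}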