go-xdfx2csv/main.go

package main

import (
	"bufio"
	"encoding/csv"
	"fmt"
	"log"
	"os"
	"strings"
)

// Words holds a single dictionary entry parsed from the XDXF file.
type Words struct {
	word          string
	translation   string
	transcription string
}

// Dictionary metadata taken from the XDXF header.
var (
	full_name string
	lang_from string
	lang_to   string
)
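
// The parser expects XDXF entries of the form
//	<ar><k>word</k><tr>transcription</tr>translation</ar>
// possibly split across several lines, preceded by a header that carries
// <full_name> and the lang_from/lang_to attributes.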
func main() {
	fcsv, err := os.Create("./dict.csv")
	if err != nil {
		log.Fatalf("create file error: %v", err)
	}
	defer fcsv.Close()

	writer := csv.NewWriter(fcsv)
	writer.Comma = '|'
	// Flush the buffered CSV writer before exiting, otherwise the last
	// records may never reach the file.
	defer writer.Flush()

	fxdfx, err := os.OpenFile("dict.xdxf", os.O_RDONLY, os.ModePerm)
	if err != nil {
		log.Fatalf("open file error: %v", err)
	}
	defer fxdfx.Close()

	fullFile(fxdfx, writer)
}
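
// fullFile scans the XDXF stream line by line: it first reads the header
// (dictionary name and language pair), then converts each <ar> entry into
// one word|transcription|translation record on the CSV writer.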
func fullFile(fxdfx *os.File, fcsv *csv.Writer) {
	sc := bufio.NewScanner(fxdfx)
	// Skip the header lines, saving the dictionary name and language pair
	// on the way, and stop at the first <ar><k> entry.
	first := ""
	for sc.Scan() {
		line := sc.Text()
		if strings.Contains(line, "<full_name>") {
			line = strings.ReplaceAll(line, "full_name>", "")
			line = strings.ReplaceAll(line, "</", "")
			full_name = strings.ReplaceAll(line, "<", "")
		}
		if strings.Contains(line, "lang_from=") && strings.Contains(line, "lang_to=") {
			tmp := strings.Split(line, "=")
			lang_from = strings.Split(tmp[1], " ")[0]
			lang_to = strings.Split(tmp[2], " ")[0]
		}
		if strings.Contains(line, "<ar><k>") {
			first = line
			break
		}
	}
	fmt.Printf("Dict name: %s.\n", full_name)
	fmt.Printf("From %s to %s.\n", lang_from, lang_to)
	// Read the rest of the file line by line, accumulating each <ar> entry
	// into neww and writing it out once its closing </ar> tag is found.
	neww := new(Words)
	for {
		first = strings.TrimSpace(first)
		if first == "" {
			if sc.Scan() {
				first += sc.Text()
			} else {
				break
			}
		}
		if strings.Contains(first, "<ar>") {
			// A new entry starts: reset the previous one.
			neww.word = ""
			neww.translation = ""
			neww.transcription = ""
		}
		if strings.Contains(first, "<k>") {
			_, neww.word, _ = strings.Cut(first, "<k>")
			if strings.Contains(neww.word, "</k>") {
				neww.word, first, _ = strings.Cut(neww.word, "</k>")
				first = strings.TrimSpace(first)
				if first == "" {
					if sc.Scan() {
						first += sc.Text()
					} else {
						break
					}
				}
			}
		} else if strings.Contains(first, "<tr>") {
			_, neww.transcription, _ = strings.Cut(first, "<tr>")
			if strings.Contains(neww.transcription, "</tr>") {
				neww.transcription, first, _ = strings.Cut(neww.transcription, "</tr>")
				first = strings.TrimSpace(first)
				if first == "" {
					if sc.Scan() {
						first += sc.Text()
					} else {
						break
					}
				}
			}
		} else if strings.Contains(first, "</ar>") {
			// The entry is complete: everything before </ar> is the translation.
			neww.translation, first, _ = strings.Cut(first, "</ar>")
			if err := fcsv.Write([]string{neww.word, neww.transcription, neww.translation}); err != nil {
				fmt.Println(err)
			}
			first = strings.TrimSpace(first)
			if first == "" {
				if sc.Scan() {
					first += sc.Text()
				} else {
					break
				}
			}
		} else {
			if sc.Scan() {
				first += sc.Text()
			} else {
				break
			}
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatalf("scan file error: %v", err)
	}
}
/*func processChunk(chunk []byte, linesPool *sync.Pool, stringPool *sync.Pool, start time.Time, end time.Time) {
}

// readFile opens the file in chunks and hands them to another func. https://medium.com/swlh/processing-16gb-file-in-seconds-go-lang-3982c235dfa2
func readFile() {
}*/
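
The commented-out processChunk/readFile stubs above point at the chunked-reading approach from the linked article. Below is a minimal, standalone sketch of that idea, assuming the goal is to hand complete lines to a processing callback; readInChunks, the 256 KiB chunk size, and the callback signature are illustrative placeholders, not code from this repository.

package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

// readInChunks reads path in fixed-size chunks and calls process once per
// complete line, carrying any partial trailing line over to the next chunk.
func readInChunks(path string, process func(line []byte)) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	const chunkSize = 256 * 1024 // placeholder size
	buf := make([]byte, chunkSize)
	var leftover []byte

	for {
		n, readErr := f.Read(buf)
		if n > 0 {
			data := append(leftover, buf[:n]...)
			for {
				i := bytes.IndexByte(data, '\n')
				if i < 0 {
					break
				}
				process(data[:i])
				data = data[i+1:]
			}
			leftover = append([]byte(nil), data...)
		}
		if readErr == io.EOF {
			if len(leftover) > 0 {
				process(leftover)
			}
			return nil
		}
		if readErr != nil {
			return readErr
		}
	}
}

func main() {
	lines := 0
	if err := readInChunks("dict.xdxf", func(line []byte) { lines++ }); err != nil {
		fmt.Println(err)
	}
	fmt.Println("lines read:", lines)
}

Carrying the partial trailing line over between reads keeps a chunk boundary from splitting an entry; the per-line handling that fullFile does today could then move into the callback.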