new cutup approach(es)

This commit is contained in:
nate smith 2024-04-28 01:15:10 -07:00
parent 59e4e54172
commit 2e6bf9cc2a
2 changed files with 332 additions and 143 deletions

View File

@ -2,32 +2,24 @@ package cutup
import ( import (
"bufio" "bufio"
"crypto/sha1"
"fmt" "fmt"
"io" "os"
"path"
"strings" "strings"
) )
func conjPrep(phraseBuff []byte, r rune) int { const (
if r != ' ' { srcDir = "/home/vilmibm/pg_plaintext/files"
return -1 tgtDir = "/home/vilmibm/pg_plaintext/cutup"
} workers = 10
)
suffices := []string{"from", "at", "but", "however", "yet", "though", "and", "to", "on", "or"} // TODO configurable src/tgt dir
maxLen := 8 // TODO magic number based on longest suffix // TODO generalize so it's not gutenberg specific
offset := len(phraseBuff) - maxLen
if offset < 0 {
offset = 0
}
end := string(phraseBuff[offset:])
for _, s := range suffices {
if strings.HasSuffix(end, " "+s) {
return len(s)
}
}
return -1
}
func Cutup(ins io.Reader) { func worker(paths <-chan string, sources chan<- string) {
// TODO generalize to n character phrase markers, write new function
phraseMarkers := map[rune]bool{ phraseMarkers := map[rune]bool{
';': true, ';': true,
',': true, ',': true,
@ -52,48 +44,79 @@ func Cutup(ins io.Reader) {
'>': true, '>': true,
} }
// I want to experiment with treating prepositions and conjunctions as phrase for p := range paths {
// markers. f, err := os.Open(p)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to open '%s': %s\n", p, err.Error())
}
s := bufio.NewScanner(f)
// to do this i would need to check the phraseBuff when I check phraseMarkers and then split accordingly
s := bufio.NewScanner(ins)
phraseBuff := []byte{} phraseBuff := []byte{}
printed := false written := 0
inHeader := true
title := ""
sourceid := ""
var of *os.File
var cleaned string
var ok bool
var asStr string
var text string
var prefix string
for s.Scan() { for s.Scan() {
text := strings.TrimSpace(s.Text()) text = strings.TrimSpace(s.Text())
if strings.HasPrefix(text, "*** START") {
title, _ = strings.CutPrefix(text, "*** START OF THE PROJECT GUTENBERG")
title, _ = strings.CutPrefix(title, " EBOOK")
title = strings.Map(rep, title)
title = strings.TrimSpace(title)
inHeader = false
continue
}
if inHeader {
continue
}
if strings.HasPrefix(text, "*** END") {
break
}
if title == "" {
fmt.Fprintf(os.Stderr, "got to cutup phase with no title: '%s'", p)
break
}
if sourceid == "" {
sourceid = fmt.Sprintf("%x", sha1.Sum([]byte(title)))[0:6]
prefix = sourceid + "\t"
of, err = os.Create(path.Join(tgtDir, sourceid))
if err != nil {
fmt.Fprintf(os.Stderr, "could not open '%s' for writing: %s", sourceid, err.Error())
break
}
}
for i, r := range text { for i, r := range text {
if ok := phraseMarkers[r]; ok { if ok = phraseMarkers[r]; ok {
if len(phraseBuff) >= 10 { if len(phraseBuff) >= 10 {
cleaned := clean(phraseBuff) cleaned = clean(phraseBuff)
if len(cleaned) > 0 { if len(cleaned) > 0 {
fmt.Println(cleaned) fmt.Fprintln(of, prefix+cleaned)
printed = true written++
} }
} }
if !printed {
//fmt.Fprintf(os.Stderr, "SKIP: %s\n", string(phraseBuff))
}
printed = false
phraseBuff = []byte{} phraseBuff = []byte{}
} else if v := conjPrep(phraseBuff, r); v > 0 { } else if v := conjPrep(phraseBuff, r); v > 0 {
// TODO erase or keep? starting with erase. // TODO erase or keep? starting with erase.
phraseBuff = phraseBuff[0 : len(phraseBuff)-v] phraseBuff = phraseBuff[0 : len(phraseBuff)-v]
// TODO this pasta is copied // TODO this pasta is copied
if len(phraseBuff) >= 10 { if len(phraseBuff) >= 10 {
cleaned := clean(phraseBuff) cleaned = clean(phraseBuff)
if len(cleaned) > 0 { if len(cleaned) > 0 {
fmt.Println(cleaned) fmt.Fprintln(of, prefix+cleaned)
printed = true written++
} }
} }
if !printed {
//fmt.Fprintf(os.Stderr, "SKIP: %s\n", string(phraseBuff))
}
printed = false
phraseBuff = []byte{} phraseBuff = []byte{}
} else { } else {
asStr := string(phraseBuff) asStr = string(phraseBuff)
if r == ' ' && strings.HasSuffix(asStr, " ") { if r == ' ' && strings.HasSuffix(asStr, " ") {
continue continue
} }
@ -104,39 +127,138 @@ func Cutup(ins io.Reader) {
} }
} }
} }
of.Close()
if written == 0 {
// there are a bunch of empty books in gutenberg :( these are text files
// that just have start and end markers with nothing in between. nothing
// i can do about it.
fmt.Fprintf(os.Stderr, "WARN: no content found in '%s' '%s'\n", sourceid, p)
}
sources <- fmt.Sprintf("%s\t%s", sourceid, title)
}
}
// CutupFiles walks srcDir, fans the file paths out to a pool of worker
// goroutines that cut each book into phrases, and writes a TSV index
// mapping source ids to titles into tgtDir.
//
// It returns an error if tgtDir cannot be created or srcDir cannot be
// read; per-file failures are reported by the workers on stderr.
func CutupFiles() error {
	err := os.Mkdir(tgtDir, 0770)
	if err != nil {
		return err
	}

	dir, err := os.Open(srcDir)
	if err != nil {
		return fmt.Errorf("could not open %s: %w", srcDir, err)
	}
	// Close the directory handle once the listing has been read
	// (previously leaked for the life of the process).
	defer dir.Close()

	entries, err := dir.Readdirnames(-1)
	if err != nil {
		return fmt.Errorf("could not read %s: %w", srcDir, err)
	}

	// Buffer both channels to the full entry count so neither the
	// producer loop below nor the workers can block indefinitely.
	paths := make(chan string, len(entries))
	sources := make(chan string, len(entries))

	for x := 0; x < workers; x++ {
		go worker(paths, sources)
	}

	for _, e := range entries {
		paths <- path.Join(srcDir, e)
	}
	close(paths)

	ixFile, err := os.Create(path.Join(tgtDir, "_title_index.tsv"))
	if err != nil {
		return fmt.Errorf("could not open index file: %w", err)
	}
	defer ixFile.Close()

	// Each worker sends exactly one line per path, so draining
	// len(entries) messages guarantees all work has finished.
	for i := 0; i < len(entries); i++ {
		l := <-sources
		fmt.Printf("%d/%d\r", i+1, len(entries))
		fmt.Fprintln(ixFile, l)
	}
	close(sources)

	return nil
}
// conjPrep checks, at a word boundary (r == ' '), whether phraseBuff
// ends in a conjunction or preposition from the suffix list. It
// returns the byte length of the matched word so the caller can trim
// it, or -1 when r is not a space or nothing matches.
func conjPrep(phraseBuff []byte, r rune) int {
	// Only a space terminates a word, so anything else can't match.
	if r != ' ' {
		return -1
	}

	suffices := []string{"from", "at", "but", "however", "yet", "though", "and", "to", "on", "or"}

	// Inspect just enough of the tail to hold the longest suffix plus
	// its leading space; computing this here replaces the old magic 8
	// and stays correct if the list changes.
	maxLen := 0
	for _, s := range suffices {
		if len(s)+1 > maxLen {
			maxLen = len(s) + 1
		}
	}

	offset := len(phraseBuff) - maxLen
	if offset < 0 {
		offset = 0
	}
	end := string(phraseBuff[offset:])

	// Require a preceding space so e.g. "band" does not match "and".
	for _, s := range suffices {
		if strings.HasSuffix(end, " "+s) {
			return len(s)
		}
	}

	return -1
}
func isAlpha(r rune) bool { func isAlpha(r rune) bool {
alphaChars := map[rune]bool{ // TODO use rune numerical ranges for this
'a': true, switch strings.ToLower(string(r)) {
'b': true, case "a":
'c': true, return true
'd': true, case "b":
'e': true, return true
'f': true, case "c":
'g': true, return true
'h': true, case "d":
'i': true, return true
'j': true, case "e":
'k': true, return true
'l': true, case "f":
'm': true, return true
'n': true, case "g":
'o': true, return true
'p': true, case "h":
'q': true, return true
'r': true, case "i":
's': true, return true
't': true, case "j":
'u': true, return true
'v': true, case "k":
'w': true, return true
'x': true, case "l":
'y': true, return true
'z': true, case "m":
return true
case "n":
return true
case "o":
return true
case "p":
return true
case "q":
return true
case "r":
return true
case "s":
return true
case "t":
return true
case "u":
return true
case "v":
return true
case "w":
return true
case "x":
return true
case "y":
return true
case "z":
return true
} }
lookup := strings.ToLower(string(r))
return alphaChars[rune(lookup[0])] return false
} }
func alphaPercent(s string) float64 { func alphaPercent(s string) float64 {
@ -153,19 +275,61 @@ func alphaPercent(s string) float64 {
return 100 * (alpha / total) return 100 * (alpha / total)
} }
// rep is a strings.Map callback that normalizes runes in Gutenberg
// text: curly quotes become ASCII quotes, backslashes become forward
// slashes, and bracketing/markup/control characters are dropped (a
// negative return tells strings.Map to remove the rune).
func rep(r rune) rune {
	switch r {
	case '’':
		// NOTE(review): this literal had been garbled to an empty rune
		// by an encoding pass; restored to the right single quote,
		// matching the old clean() which replaced it with "'".
		return '\''
	case '“', '”':
		return '"'
	case '\\':
		return '/'
	case '"', '(', '[', '{', '<', '_', '*', '\r', '\t', '\n',
		0x1c, 0x19, 0x01, 0x0f, 0x00, 0xb0, 0x1b:
		// Stray markup, brackets, and control bytes are removed
		// entirely. '\n' should be unreachable for line-scanned input
		// but stray newlines were showing up in output.
		return -1
	}
	// Everything else passes through unchanged.
	return r
}
func clean(bs []byte) string { func clean(bs []byte) string {
s := string(bs) s := strings.ToLower(
s = strings.ReplaceAll(s, "", "'") strings.TrimSpace(
s = strings.ReplaceAll(s, "\"", "") strings.TrimRight(
s = strings.ReplaceAll(s, "(", "") strings.TrimLeft(
s = strings.ReplaceAll(s, "[", "") strings.Map(rep, strings.ToValidUTF8(string(bs), "")), "'\""), "'\"")))
s = strings.ReplaceAll(s, "{", "")
s = strings.ReplaceAll(s, "<", "")
s = strings.ReplaceAll(s, "_", "")
s = strings.ReplaceAll(s, "*", "")
s = strings.TrimLeft(s, "'\"")
s = strings.TrimSpace(s)
s = strings.ToLower(s)
if alphaPercent(s) < 50.0 { if alphaPercent(s) < 50.0 {
return "" return ""

View File

@ -2,63 +2,88 @@ package ingest
import ( import (
"bufio" "bufio"
"database/sql" "context"
"fmt" "fmt"
"io" "os"
"path"
"strings"
_ "github.com/mattn/go-sqlite3" "github.com/vilmibm/trunkless/db"
) )
const dsn = "phrase.db?_journal=OFF" const cutupDir = "/home/vilmibm/pg_plaintext/cutup"
func createSource(db *sql.DB, sourceName string) (int64, error) { // TODO
stmt, err := db.Prepare("INSERT INTO sources (name) VALUES (?) ON CONFLICT DO NOTHING RETURNING id") // - [X] finalize gutenberg ingestion
if err != nil { // - [ ] clean up commands
return -1, err // - [ ] clean up repo
} // - [ ] push and deploy to town with new pg db
// - [ ] gamefaqs extraction
// - [ ] corpus selector
// - [ ] deploy to town
// - [ ] geocities
// - [ ] blog post
// - [ ] launch
result, err := stmt.Exec(sourceName) func IngestGut() error {
if err != nil { conn, err := db.Connect()
return -1, err
}
defer stmt.Close()
return result.LastInsertId()
}
func Ingest(sourceName string, ins io.Reader) error {
db, err := sql.Open("sqlite3", dsn)
if err != nil { if err != nil {
return err return err
} }
defer conn.Close(context.Background())
defer db.Close() dir, err := os.Open(cutupDir)
s := bufio.NewScanner(ins)
sourceID, err := createSource(db, sourceName)
if err != nil { if err != nil {
return fmt.Errorf("could not make source: %w", err) return fmt.Errorf("could not open %s: %w", cutupDir, err)
} }
tx, err := db.Begin() // echo gutenberg | sha1sum | head -c7
corpusid := "cb20c3e"
_, err = conn.Exec(context.Background(), "INSERT INTO corpora (id, name) VALUES ($1, $2) ON CONFLICT DO NOTHING", corpusid, "gutenberg")
if err != nil { if err != nil {
return fmt.Errorf("failed to create transaction: %w", err) return fmt.Errorf("failed to create gutenberg corpus: %w", err)
} }
stmt, err := tx.Prepare("INSERT INTO phrases (sourceid, phrase) VALUES (?, ?) ON CONFLICT DO NOTHING") entries, err := dir.Readdirnames(-1)
defer stmt.Close() if err != nil {
return fmt.Errorf("could not read %s: %w", cutupDir, err)
}
idx, err := os.Open(path.Join(cutupDir, "_title_index.tsv"))
if err != nil {
return fmt.Errorf("failed to open source index: %w", err)
}
tx, err := conn.Begin(context.Background())
if err != nil {
return fmt.Errorf("could not open transaction: %w", err)
}
s := bufio.NewScanner(idx)
for s.Scan() { for s.Scan() {
phrase := s.Text() line := s.Text()
parts := strings.SplitN(line, " ", 2)
if len(parts) != 2 {
return fmt.Errorf("malformed line in sourceMap: %s", line)
}
_, err = tx.Exec(context.Background(),
"INSERT INTO sources (id, corpusid, name) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING",
parts[0], corpusid, parts[1])
}
tx.Commit(context.Background())
for _, e := range entries {
if strings.HasPrefix(e, "_") {
continue
}
p := path.Join(cutupDir, e)
sql := fmt.Sprintf("COPY phrases(sourceid, phrase) FROM '%s'", p)
_, err = conn.Exec(context.Background(), sql)
if err != nil { if err != nil {
return err fmt.Fprintf(os.Stderr, "failed to ingest '%s': %s\n", p, err.Error())
}
if _, err = stmt.Exec(sourceID, phrase); err != nil {
return fmt.Errorf("could not insert phrase '%s' for source '%d': %w", phrase, sourceID, err)
} }
} }
return tx.Commit() return nil
} }