404 lines
13 KiB
Go
404 lines
13 KiB
Go
package main
|
|
|
|
import (
|
|
"bufio"
|
|
"cmp"
|
|
"context"
|
|
_ "embed"
|
|
"encoding/json"
|
|
"errors"
|
|
"flag"
|
|
"fmt"
|
|
"log/slog"
|
|
"maps"
|
|
"os"
|
|
"os/signal"
|
|
"path/filepath"
|
|
"slices"
|
|
|
|
"golang.org/x/sync/errgroup"
|
|
"zombiezen.com/go/sqlite"
|
|
"zombiezen.com/go/sqlite/sqlitex"
|
|
|
|
"git.sunturtle.xyz/zephyr/horse/horse"
|
|
)
|
|
|
|
func main() {
|
|
var (
|
|
mdb string
|
|
out string
|
|
region string
|
|
)
|
|
flag.StringVar(&mdb, "mdb", os.ExpandEnv(`$USERPROFILE\AppData\LocalLow\Cygames\Umamusume\master\master.mdb`), "`path` to Umamusume master.mdb")
|
|
flag.StringVar(&out, "o", `.`, "`dir`ectory for output files")
|
|
flag.StringVar(®ion, "region", "global", "region the database is for (global, jp)")
|
|
flag.Parse()
|
|
|
|
slog.Info("open", slog.String("mdb", mdb))
|
|
db, err := sqlitex.NewPool(mdb, sqlitex.PoolOptions{Flags: sqlite.OpenReadOnly})
|
|
if err != nil {
|
|
slog.Error("opening mdb", slog.String("mdb", mdb), slog.Any("err", err))
|
|
os.Exit(1)
|
|
}
|
|
|
|
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
|
|
go func() {
|
|
<-ctx.Done()
|
|
stop()
|
|
}()
|
|
|
|
loadgroup, ctx1 := errgroup.WithContext(ctx)
|
|
charas := load(ctx1, loadgroup, db, "characters", characterSQL, func(s *sqlite.Stmt) horse.Character {
|
|
return horse.Character{
|
|
ID: horse.CharacterID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
}
|
|
})
|
|
aff := load(ctx1, loadgroup, db, "pair affinity", affinitySQL, func(s *sqlite.Stmt) horse.AffinityRelation {
|
|
return horse.AffinityRelation{
|
|
IDA: s.ColumnInt(0),
|
|
IDB: s.ColumnInt(1),
|
|
IDC: s.ColumnInt(2),
|
|
Affinity: s.ColumnInt(3),
|
|
}
|
|
})
|
|
umas := load(ctx1, loadgroup, db, "umas", umaSQL, func(s *sqlite.Stmt) horse.Uma {
|
|
return horse.Uma{
|
|
ID: horse.UmaID(s.ColumnInt(0)),
|
|
CharacterID: horse.CharacterID(s.ColumnInt(1)),
|
|
Name: s.ColumnText(2),
|
|
Variant: s.ColumnText(3),
|
|
Sprint: horse.AptitudeLevel(s.ColumnInt(4)),
|
|
Mile: horse.AptitudeLevel(s.ColumnInt(6)),
|
|
Medium: horse.AptitudeLevel(s.ColumnInt(7)),
|
|
Long: horse.AptitudeLevel(s.ColumnInt(8)),
|
|
Front: horse.AptitudeLevel(s.ColumnInt(9)),
|
|
Pace: horse.AptitudeLevel(s.ColumnInt(10)),
|
|
Late: horse.AptitudeLevel(s.ColumnInt(11)),
|
|
End: horse.AptitudeLevel(s.ColumnInt(12)),
|
|
Turf: horse.AptitudeLevel(s.ColumnInt(13)),
|
|
Dirt: horse.AptitudeLevel(s.ColumnInt(14)),
|
|
Unique: horse.SkillID(s.ColumnInt(15)),
|
|
Skill1: horse.SkillID(s.ColumnInt(16)),
|
|
Skill2: horse.SkillID(s.ColumnInt(17)),
|
|
Skill3: horse.SkillID(s.ColumnInt(18)),
|
|
SkillPL2: horse.SkillID(s.ColumnInt(19)),
|
|
SkillPL3: horse.SkillID(s.ColumnInt(20)),
|
|
SkillPL4: horse.SkillID(s.ColumnInt(21)),
|
|
SkillPL5: horse.SkillID(s.ColumnInt(22)),
|
|
}
|
|
})
|
|
sg := load(ctx1, loadgroup, db, "skill groups", skillGroupSQL, func(s *sqlite.Stmt) horse.SkillGroup {
|
|
return horse.SkillGroup{
|
|
ID: horse.SkillGroupID(s.ColumnInt(0)),
|
|
Skill1: horse.SkillID(s.ColumnInt(1)),
|
|
Skill2: horse.SkillID(s.ColumnInt(2)),
|
|
Skill3: horse.SkillID(s.ColumnInt(3)),
|
|
SkillBad: horse.SkillID(s.ColumnInt(4)),
|
|
}
|
|
})
|
|
skills := load(ctx1, loadgroup, db, "skills", skillSQL, func(s *sqlite.Stmt) horse.Skill {
|
|
return horse.Skill{
|
|
ID: horse.SkillID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
Description: s.ColumnText(2),
|
|
Group: horse.SkillGroupID(s.ColumnInt32(3)),
|
|
Rarity: int8(s.ColumnInt(5)),
|
|
GroupRate: int8(s.ColumnInt(6)),
|
|
GradeValue: s.ColumnInt32(7),
|
|
WitCheck: s.ColumnBool(8),
|
|
Activations: trimActivations([]horse.Activation{
|
|
{
|
|
Precondition: s.ColumnText(9),
|
|
Condition: s.ColumnText(10),
|
|
Duration: horse.TenThousandths(s.ColumnInt(11)),
|
|
DurScale: horse.DurScale(s.ColumnInt(12)),
|
|
Cooldown: horse.TenThousandths(s.ColumnInt(13)),
|
|
Abilities: trimAbilities([]horse.Ability{
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(14)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(15)),
|
|
Value: horse.TenThousandths(s.ColumnInt(16)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(17)),
|
|
TargetValue: s.ColumnInt32(18),
|
|
},
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(19)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(20)),
|
|
Value: horse.TenThousandths(s.ColumnInt(21)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(22)),
|
|
TargetValue: s.ColumnInt32(23),
|
|
},
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(24)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(25)),
|
|
Value: horse.TenThousandths(s.ColumnInt(26)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(27)),
|
|
TargetValue: s.ColumnInt32(28),
|
|
},
|
|
}),
|
|
},
|
|
{
|
|
Precondition: s.ColumnText(29),
|
|
Condition: s.ColumnText(30),
|
|
Duration: horse.TenThousandths(s.ColumnInt(31)),
|
|
DurScale: horse.DurScale(s.ColumnInt(32)),
|
|
Cooldown: horse.TenThousandths(s.ColumnInt(33)),
|
|
Abilities: trimAbilities([]horse.Ability{
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(34)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(35)),
|
|
Value: horse.TenThousandths(s.ColumnInt(36)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(37)),
|
|
TargetValue: s.ColumnInt32(38),
|
|
},
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(39)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(40)),
|
|
Value: horse.TenThousandths(s.ColumnInt(41)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(42)),
|
|
TargetValue: s.ColumnInt32(43),
|
|
},
|
|
{
|
|
Type: horse.AbilityType(s.ColumnInt(44)),
|
|
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(45)),
|
|
Value: horse.TenThousandths(s.ColumnInt(46)),
|
|
Target: horse.AbilityTarget(s.ColumnInt(47)),
|
|
TargetValue: s.ColumnInt32(48),
|
|
},
|
|
}),
|
|
},
|
|
}),
|
|
UniqueOwner: s.ColumnText(52), // TODO(zeph): should be id, not name
|
|
SPCost: s.ColumnInt(49),
|
|
IconID: s.ColumnInt(53),
|
|
}
|
|
})
|
|
races := load(ctx1, loadgroup, db, "races", raceSQL, func(s *sqlite.Stmt) horse.Race {
|
|
return horse.Race{
|
|
ID: horse.RaceID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
// TODO(zeph): grade
|
|
Thumbnail: s.ColumnInt(3),
|
|
Primary: horse.RaceID(s.ColumnInt(4)),
|
|
}
|
|
})
|
|
saddles := load(ctx1, loadgroup, db, "saddles", saddleSQL, func(s *sqlite.Stmt) horse.Saddle {
|
|
return horse.Saddle{
|
|
ID: horse.SaddleID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
Races: trimZeros(
|
|
horse.RaceID(s.ColumnInt(2)),
|
|
horse.RaceID(s.ColumnInt(3)),
|
|
horse.RaceID(s.ColumnInt(4)),
|
|
),
|
|
Type: horse.SaddleType(s.ColumnInt(5)),
|
|
Primary: horse.SaddleID(s.ColumnInt(6)),
|
|
}
|
|
})
|
|
scenarios := load(ctx1, loadgroup, db, "scenarios", scenarioSQL, func(s *sqlite.Stmt) horse.Scenario {
|
|
return horse.Scenario{
|
|
ID: horse.ScenarioID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
Title: s.ColumnText(2),
|
|
}
|
|
})
|
|
sparks := load(ctx1, loadgroup, db, "sparks", sparkSQL, func(s *sqlite.Stmt) horse.Spark {
|
|
return horse.Spark{
|
|
ID: horse.SparkID(s.ColumnInt(0)),
|
|
Name: s.ColumnText(1),
|
|
Description: s.ColumnText(2),
|
|
Group: horse.SparkGroupID(s.ColumnInt(3)),
|
|
Rarity: horse.SparkRarity(s.ColumnInt(4)),
|
|
Type: horse.SparkType(s.ColumnInt(5)),
|
|
// Effects filled in later.
|
|
}
|
|
})
|
|
sparkeffs := load(ctx1, loadgroup, db, "spark effects", sparkEffectSQL, func(s *sqlite.Stmt) SparkEffImm {
|
|
return SparkEffImm{
|
|
Group: horse.SparkGroupID(s.ColumnInt(0)),
|
|
Effect: s.ColumnInt(1),
|
|
Target: horse.SparkTarget(s.ColumnInt(2)),
|
|
Value1: s.ColumnInt32(3),
|
|
Value2: s.ColumnInt32(4),
|
|
}
|
|
})
|
|
|
|
if err := os.MkdirAll(filepath.Join(out, region), 0775); err != nil {
|
|
slog.Error("create output dir", slog.Any("err", err))
|
|
os.Exit(1)
|
|
}
|
|
|
|
writegroup, ctx2 := errgroup.WithContext(ctx)
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "character.json", charas) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "affinity.json", aff) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "uma.json", umas) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "skill-group.json", sg) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "skill.json", skills) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "race.json", races) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "saddle.json", saddles) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "scenario.json", scenarios) })
|
|
writegroup.Go(func() error { return write(ctx2, out, region, "spark.json", mergesparks(sparks, sparkeffs)) })
|
|
if err := writegroup.Wait(); err != nil {
|
|
slog.ErrorContext(ctx, "write", slog.Any("err", err))
|
|
os.Exit(1)
|
|
}
|
|
|
|
slog.InfoContext(ctx, "done")
|
|
}
|
|
|
|
// SQL query text for each exported dataset, embedded at build time from the
// sql/ directory next to this source file.
var (
	//go:embed sql/character.sql
	characterSQL string
	//go:embed sql/affinity.sql
	affinitySQL string
	//go:embed sql/uma.sql
	umaSQL string
	//go:embed sql/skill-group.sql
	skillGroupSQL string
	//go:embed sql/skill.sql
	skillSQL string
	//go:embed sql/race.sql
	raceSQL string
	//go:embed sql/saddle.sql
	saddleSQL string
	//go:embed sql/scenario.sql
	scenarioSQL string
	//go:embed sql/spark.sql
	sparkSQL string
	//go:embed sql/spark-effect.sql
	sparkEffectSQL string
)
|
|
|
|
func load[T any](ctx context.Context, group *errgroup.Group, db *sqlitex.Pool, kind, sql string, row func(*sqlite.Stmt) T) func() ([]T, error) {
|
|
slog.InfoContext(ctx, "load", slog.String("kind", kind))
|
|
var r []T
|
|
group.Go(func() error {
|
|
conn, err := db.Take(ctx)
|
|
defer db.Put(conn)
|
|
if err != nil {
|
|
return fmt.Errorf("couldn't get connection for %s: %w", kind, err)
|
|
}
|
|
stmt, _, err := conn.PrepareTransient(sql)
|
|
if err != nil {
|
|
return fmt.Errorf("couldn't prepare statement for %s: %w", kind, err)
|
|
}
|
|
|
|
for {
|
|
ok, err := stmt.Step()
|
|
if err != nil {
|
|
return fmt.Errorf("error stepping %s: %w", kind, err)
|
|
}
|
|
if !ok {
|
|
break
|
|
}
|
|
r = append(r, row(stmt))
|
|
}
|
|
return nil
|
|
})
|
|
return func() ([]T, error) {
|
|
err := group.Wait()
|
|
if err == context.Canceled {
|
|
// After the first wait, all future ones return context.Canceled.
|
|
// We want to be able to wait any number of times, so hide it.
|
|
err = nil
|
|
}
|
|
return r, err
|
|
}
|
|
}
|
|
|
|
func write[T any](ctx context.Context, out, region, name string, v func() (T, error)) error {
|
|
p := filepath.Join(out, region, name)
|
|
r, err := v()
|
|
if err != nil {
|
|
return err
|
|
}
|
|
slog.InfoContext(ctx, "write", slog.String("path", p))
|
|
f, err := os.Create(p)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
defer f.Close()
|
|
w := bufio.NewWriter(f)
|
|
enc := json.NewEncoder(w)
|
|
enc.SetEscapeHTML(false)
|
|
enc.SetIndent("", "\t")
|
|
err = enc.Encode(r)
|
|
err = errors.Join(err, w.Flush())
|
|
slog.InfoContext(ctx, "marshaled", slog.String("path", p))
|
|
return err
|
|
}
|
|
|
|
func mergesparks(sparks func() ([]horse.Spark, error), effs func() ([]SparkEffImm, error)) func() ([]horse.Spark, error) {
|
|
return func() ([]horse.Spark, error) {
|
|
sp, err := sparks()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
ef, err := effs()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
// Spark effects are sorted by group ID, but groups apply to multiple
|
|
// sparks, and we don't rely on sparks and groups being in the same order.
|
|
// It is possible to merge in linear time, but not worth the effort:
|
|
// n log n is fine since this is an AOT step.
|
|
for i := range sp {
|
|
k, ok := slices.BinarySearchFunc(ef, sp[i].Group, func(e SparkEffImm, v horse.SparkGroupID) int { return cmp.Compare(e.Group, v) })
|
|
if !ok {
|
|
panic(fmt.Errorf("mergesparks: no spark group for %+v", &sp[i]))
|
|
}
|
|
// Back up to the first effect in the group.
|
|
for k > 0 && ef[k-1].Group == sp[i].Group {
|
|
k--
|
|
}
|
|
// Map effect IDs to the lists of their effects.
|
|
m := make(map[int][]horse.SparkEffect)
|
|
for _, e := range ef[k:] {
|
|
if e.Group != sp[i].Group {
|
|
// Done with this group.
|
|
break
|
|
}
|
|
m[e.Effect] = append(m[e.Effect], horse.SparkEffect{Target: e.Target, Value1: e.Value1, Value2: e.Value2})
|
|
}
|
|
// Now get effects in order.
|
|
keys := slices.Sorted(maps.Keys(m))
|
|
sp[i].Effects = make([][]horse.SparkEffect, 0, len(keys))
|
|
for _, key := range keys {
|
|
sp[i].Effects = append(sp[i].Effects, m[key])
|
|
}
|
|
}
|
|
return sp, nil
|
|
}
|
|
}
|
|
|
|
// SparkEffImm is the row-shaped, intermediate form of a spark effect as read
// from the spark-effect query, before mergesparks groups effects onto their
// horse.Spark.
type SparkEffImm struct {
	// Group matches horse.Spark.Group; effect rows are sorted by this ID.
	Group horse.SparkGroupID
	// Effect buckets rows within a group; each distinct Effect becomes one
	// list in horse.Spark.Effects.
	Effect int
	Target horse.SparkTarget
	Value1 int32
	Value2 int32
}
|
|
|
|
func trimAbilities(s []horse.Ability) []horse.Ability {
|
|
for len(s) > 0 && s[len(s)-1].Type == 0 {
|
|
s = s[:len(s)-1]
|
|
}
|
|
return s
|
|
}
|
|
|
|
func trimActivations(s []horse.Activation) []horse.Activation {
|
|
for len(s) > 0 && s[len(s)-1].Condition == "" {
|
|
s = s[:len(s)-1]
|
|
}
|
|
return s
|
|
}
|
|
|
|
// trimZeros returns its arguments with the trailing run of zero values
// removed; interior zeros are preserved.
func trimZeros[T comparable](s ...T) []T {
	var zero T
	n := len(s)
	for n > 0 && s[n-1] == zero {
		n--
	}
	return s[:n]
}
|