horse: generate json

This commit is contained in:
2026-03-08 14:01:43 -04:00
parent 5540bb2c4e
commit 7ff271ff2d
28 changed files with 172829 additions and 60 deletions

View File

@@ -12,7 +12,7 @@ import (
"git.sunturtle.xyz/zephyr/horse/horse/global"
)
func RenderSkill(id horse.SkillID, all map[horse.SkillID]horse.Skill, groups map[int32][4]horse.SkillID) discord.ContainerComponent {
func RenderSkill(id horse.SkillID, all map[horse.SkillID]horse.Skill, groups map[horse.SkillGroupID][4]horse.SkillID) discord.ContainerComponent {
s, ok := all[id]
if !ok {
return discord.NewContainer(discord.NewTextDisplayf("invalid skill ID %v made it to RenderSkill", id))

View File

@@ -3,10 +3,17 @@ package horse
type CharacterID int16
type Character struct {
ID CharacterID
Name string
ID CharacterID `json:"chara_id"`
Name string `json:"name"`
}
func (c Character) String() string {
return c.Name
}
type AffinityRelation struct {
IDA int `json:"chara_a"`
IDB int `json:"chara_b"`
IDC int `json:"chara_c,omitzero"`
Affinity int `json:"affinity"`
}

405
horse/generate.go Normal file
View File

@@ -0,0 +1,405 @@
//go:build ignore
package main
import (
"bufio"
"cmp"
"context"
_ "embed"
"encoding/json"
"errors"
"flag"
"fmt"
"log/slog"
"maps"
"os"
"os/signal"
"path/filepath"
"slices"
"golang.org/x/sync/errgroup"
"zombiezen.com/go/sqlite"
"zombiezen.com/go/sqlite/sqlitex"
"git.sunturtle.xyz/zephyr/horse/horse"
)
// main generates the JSON data files used by the horse package from an
// Umamusume master.mdb SQLite database.
//
// All table loads run concurrently on one errgroup; each load returns a
// function that waits for the whole group and then yields its rows. The
// JSON writes then run concurrently on a second errgroup.
func main() {
	var (
		mdb    string
		out    string
		region string
	)
	flag.StringVar(&mdb, "mdb", os.ExpandEnv(`$USERPROFILE\AppData\LocalLow\Cygames\Umamusume\master\master.mdb`), "`path` to Umamusume master.mdb")
	flag.StringVar(&out, "o", `horse`, "`dir`ectory for output files")
	flag.StringVar(&region, "region", "global", "region the database is for (global, jp)")
	flag.Parse()
	slog.Info("open", slog.String("mdb", mdb))
	db, err := sqlitex.NewPool(mdb, sqlitex.PoolOptions{Flags: sqlite.OpenReadOnly})
	if err != nil {
		slog.Error("opening mdb", slog.String("mdb", mdb), slog.Any("err", err))
		os.Exit(1)
	}
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
	// Release the signal handler as soon as the context ends for any reason.
	go func() {
		<-ctx.Done()
		stop()
	}()
	loadgroup, ctx1 := errgroup.WithContext(ctx)
	// Each load maps result columns positionally; column indices below must
	// match the SELECT lists in the corresponding sql/*.sql files.
	charas := load(ctx1, loadgroup, db, "characters", characterSQL, func(s *sqlite.Stmt) horse.Character {
		return horse.Character{
			ID:   horse.CharacterID(s.ColumnInt(0)),
			Name: s.ColumnText(1),
		}
	})
	aff := load(ctx1, loadgroup, db, "pair affinity", affinitySQL, func(s *sqlite.Stmt) horse.AffinityRelation {
		return horse.AffinityRelation{
			IDA:      s.ColumnInt(0),
			IDB:      s.ColumnInt(1),
			IDC:      s.ColumnInt(2),
			Affinity: s.ColumnInt(3),
		}
	})
	umas := load(ctx1, loadgroup, db, "umas", umaSQL, func(s *sqlite.Stmt) horse.Uma {
		return horse.Uma{
			ID:          horse.UmaID(s.ColumnInt(0)),
			CharacterID: horse.CharacterID(s.ColumnInt(1)),
			Name:        s.ColumnText(2),
			Variant:     s.ColumnText(3),
			// Column 4 is chara_name in sql/uma.sql; the aptitude columns
			// begin at 5 (proper_distance_short). Reading column 4 here
			// would coerce a TEXT column and yield 0 for every uma.
			Sprint:   horse.AptitudeLevel(s.ColumnInt(5)),
			Mile:     horse.AptitudeLevel(s.ColumnInt(6)),
			Medium:   horse.AptitudeLevel(s.ColumnInt(7)),
			Long:     horse.AptitudeLevel(s.ColumnInt(8)),
			Front:    horse.AptitudeLevel(s.ColumnInt(9)),
			Pace:     horse.AptitudeLevel(s.ColumnInt(10)),
			Late:     horse.AptitudeLevel(s.ColumnInt(11)),
			End:      horse.AptitudeLevel(s.ColumnInt(12)),
			Turf:     horse.AptitudeLevel(s.ColumnInt(13)),
			Dirt:     horse.AptitudeLevel(s.ColumnInt(14)),
			Unique:   horse.SkillID(s.ColumnInt(15)),
			Skill1:   horse.SkillID(s.ColumnInt(16)),
			Skill2:   horse.SkillID(s.ColumnInt(17)),
			Skill3:   horse.SkillID(s.ColumnInt(18)),
			SkillPL2: horse.SkillID(s.ColumnInt(19)),
			SkillPL3: horse.SkillID(s.ColumnInt(20)),
			SkillPL4: horse.SkillID(s.ColumnInt(21)),
			SkillPL5: horse.SkillID(s.ColumnInt(22)),
		}
	})
	sg := load(ctx1, loadgroup, db, "skill groups", skillGroupSQL, func(s *sqlite.Stmt) horse.SkillGroup {
		return horse.SkillGroup{
			ID:       horse.SkillGroupID(s.ColumnInt(0)),
			Skill1:   horse.SkillID(s.ColumnInt(1)),
			Skill2:   horse.SkillID(s.ColumnInt(2)),
			Skill3:   horse.SkillID(s.ColumnInt(3)),
			SkillBad: horse.SkillID(s.ColumnInt(4)),
		}
	})
	skills := load(ctx1, loadgroup, db, "skills", skillSQL, func(s *sqlite.Stmt) horse.Skill {
		return horse.Skill{
			ID:          horse.SkillID(s.ColumnInt(0)),
			Name:        s.ColumnText(1),
			Description: s.ColumnText(2),
			Group:       horse.SkillGroupID(s.ColumnInt32(3)),
			Rarity:      int8(s.ColumnInt(5)),
			GroupRate:   int8(s.ColumnInt(6)),
			GradeValue:  s.ColumnInt32(7),
			WitCheck:    s.ColumnBool(8),
			// A skill has up to two activations of up to three abilities
			// each; trailing zero entries are trimmed away.
			Activations: trimActivations([]horse.Activation{
				{
					Precondition: s.ColumnText(9),
					Condition:    s.ColumnText(10),
					Duration:     horse.TenThousandths(s.ColumnInt(11)),
					DurScale:     horse.DurScale(s.ColumnInt(12)),
					Cooldown:     horse.TenThousandths(s.ColumnInt(13)),
					Abilities: trimAbilities([]horse.Ability{
						{
							Type:        horse.AbilityType(s.ColumnInt(14)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(15)),
							Value:       horse.TenThousandths(s.ColumnInt(16)),
							Target:      horse.AbilityTarget(s.ColumnInt(17)),
							TargetValue: s.ColumnInt32(18),
						},
						{
							Type:        horse.AbilityType(s.ColumnInt(19)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(20)),
							Value:       horse.TenThousandths(s.ColumnInt(21)),
							Target:      horse.AbilityTarget(s.ColumnInt(22)),
							TargetValue: s.ColumnInt32(23),
						},
						{
							Type:        horse.AbilityType(s.ColumnInt(24)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(25)),
							Value:       horse.TenThousandths(s.ColumnInt(26)),
							Target:      horse.AbilityTarget(s.ColumnInt(27)),
							TargetValue: s.ColumnInt32(28),
						},
					}),
				},
				{
					Precondition: s.ColumnText(29),
					Condition:    s.ColumnText(30),
					Duration:     horse.TenThousandths(s.ColumnInt(31)),
					DurScale:     horse.DurScale(s.ColumnInt(32)),
					Cooldown:     horse.TenThousandths(s.ColumnInt(33)),
					Abilities: trimAbilities([]horse.Ability{
						{
							Type:        horse.AbilityType(s.ColumnInt(34)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(35)),
							Value:       horse.TenThousandths(s.ColumnInt(36)),
							Target:      horse.AbilityTarget(s.ColumnInt(37)),
							TargetValue: s.ColumnInt32(38),
						},
						{
							Type:        horse.AbilityType(s.ColumnInt(39)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(40)),
							Value:       horse.TenThousandths(s.ColumnInt(41)),
							Target:      horse.AbilityTarget(s.ColumnInt(42)),
							TargetValue: s.ColumnInt32(43),
						},
						{
							Type:        horse.AbilityType(s.ColumnInt(44)),
							ValueUsage:  horse.AbilityValueUsage(s.ColumnInt(45)),
							Value:       horse.TenThousandths(s.ColumnInt(46)),
							Target:      horse.AbilityTarget(s.ColumnInt(47)),
							TargetValue: s.ColumnInt32(48),
						},
					}),
				},
			}),
			UniqueOwner: s.ColumnText(52), // TODO(zeph): should be id, not name
			SPCost:      s.ColumnInt(49),
			IconID:      s.ColumnInt(53),
		}
	})
	races := load(ctx1, loadgroup, db, "races", raceSQL, func(s *sqlite.Stmt) horse.Race {
		return horse.Race{
			ID:   horse.RaceID(s.ColumnInt(0)),
			Name: s.ColumnText(1),
			// TODO(zeph): grade
			Thumbnail: s.ColumnInt(3),
			Primary:   horse.RaceID(s.ColumnInt(4)),
		}
	})
	saddles := load(ctx1, loadgroup, db, "saddles", saddleSQL, func(s *sqlite.Stmt) horse.Saddle {
		return horse.Saddle{
			ID:   horse.SaddleID(s.ColumnInt(0)),
			Name: s.ColumnText(1),
			Races: trimZeros(
				horse.RaceID(s.ColumnInt(2)),
				horse.RaceID(s.ColumnInt(3)),
				horse.RaceID(s.ColumnInt(4)),
			),
			Type:    horse.SaddleType(s.ColumnInt(5)),
			Primary: horse.SaddleID(s.ColumnInt(6)),
		}
	})
	scenarios := load(ctx1, loadgroup, db, "scenarios", scenarioSQL, func(s *sqlite.Stmt) horse.Scenario {
		return horse.Scenario{
			ID:    horse.ScenarioID(s.ColumnInt(0)),
			Name:  s.ColumnText(1),
			Title: s.ColumnText(2),
		}
	})
	sparks := load(ctx1, loadgroup, db, "sparks", sparkSQL, func(s *sqlite.Stmt) horse.Spark {
		return horse.Spark{
			ID:          horse.SparkID(s.ColumnInt(0)),
			Name:        s.ColumnText(1),
			Description: s.ColumnText(2),
			Group:       horse.SparkGroupID(s.ColumnInt(3)),
			Rarity:      horse.SparkRarity(s.ColumnInt(4)),
			Type:        horse.SparkType(s.ColumnInt(5)),
			// Effects filled in later.
		}
	})
	sparkeffs := load(ctx1, loadgroup, db, "spark effects", sparkEffectSQL, func(s *sqlite.Stmt) SparkEffImm {
		return SparkEffImm{
			Group:  horse.SparkGroupID(s.ColumnInt(0)),
			Effect: s.ColumnInt(1),
			Target: horse.SparkTarget(s.ColumnInt(2)),
			Value1: s.ColumnInt32(3),
			Value2: s.ColumnInt32(4),
		}
	})
	if err := os.MkdirAll(filepath.Join(out, region), 0775); err != nil {
		slog.Error("create output dir", slog.Any("err", err))
		os.Exit(1)
	}
	writegroup, ctx2 := errgroup.WithContext(ctx)
	writegroup.Go(func() error { return write(ctx2, out, region, "character.json", charas) })
	writegroup.Go(func() error { return write(ctx2, out, region, "affinity.json", aff) })
	writegroup.Go(func() error { return write(ctx2, out, region, "uma.json", umas) })
	writegroup.Go(func() error { return write(ctx2, out, region, "skill-group.json", sg) })
	writegroup.Go(func() error { return write(ctx2, out, region, "skill.json", skills) })
	writegroup.Go(func() error { return write(ctx2, out, region, "race.json", races) })
	writegroup.Go(func() error { return write(ctx2, out, region, "saddle.json", saddles) })
	writegroup.Go(func() error { return write(ctx2, out, region, "scenario.json", scenarios) })
	writegroup.Go(func() error { return write(ctx2, out, region, "spark.json", mergesparks(sparks, sparkeffs)) })
	if err := writegroup.Wait(); err != nil {
		slog.ErrorContext(ctx, "write", slog.Any("err", err))
		os.Exit(1)
	}
	slog.InfoContext(ctx, "done")
}
// SQL queries for each data kind, embedded from the sql directory.
// Each query's SELECT column order must match the positional ColumnInt /
// ColumnText reads in the corresponding load call in main.
var (
	//go:embed sql/character.sql
	characterSQL string
	//go:embed sql/affinity.sql
	affinitySQL string
	//go:embed sql/uma.sql
	umaSQL string
	//go:embed sql/skill-group.sql
	skillGroupSQL string
	//go:embed sql/skill.sql
	skillSQL string
	//go:embed sql/race.sql
	raceSQL string
	//go:embed sql/saddle.sql
	saddleSQL string
	//go:embed sql/scenario.sql
	scenarioSQL string
	//go:embed sql/spark.sql
	sparkSQL string
	//go:embed sql/spark-effect.sql
	sparkEffectSQL string
)
// load starts a goroutine in group that queries db with sql and converts
// each result row to a T with row. The returned function waits for the
// entire group and then yields the accumulated rows; it may be called any
// number of times once all loads have been started.
func load[T any](ctx context.Context, group *errgroup.Group, db *sqlitex.Pool, kind, sql string, row func(*sqlite.Stmt) T) func() ([]T, error) {
	slog.InfoContext(ctx, "load", slog.String("kind", kind))
	var r []T
	group.Go(func() error {
		conn, err := db.Take(ctx)
		// Check the error before deferring Put: on failure conn is nil and
		// must not be returned to the pool.
		if err != nil {
			return fmt.Errorf("couldn't get connection for %s: %w", kind, err)
		}
		defer db.Put(conn)
		stmt, _, err := conn.PrepareTransient(sql)
		if err != nil {
			return fmt.Errorf("couldn't prepare statement for %s: %w", kind, err)
		}
		// Transient statements are not cached on the connection, so we are
		// responsible for finalizing them to release their resources.
		defer stmt.Finalize()
		for {
			ok, err := stmt.Step()
			if err != nil {
				return fmt.Errorf("error stepping %s: %w", kind, err)
			}
			if !ok {
				break
			}
			r = append(r, row(stmt))
		}
		return nil
	})
	return func() ([]T, error) {
		err := group.Wait()
		if errors.Is(err, context.Canceled) {
			// After the first wait, all future ones return context.Canceled.
			// We want to be able to wait any number of times, so hide it.
			err = nil
		}
		return r, err
	}
}
func write[T any](ctx context.Context, out, region, name string, v func() (T, error)) error {
p := filepath.Join(out, region, name)
r, err := v()
if err != nil {
return err
}
slog.InfoContext(ctx, "write", slog.String("path", p))
f, err := os.Create(p)
if err != nil {
return err
}
defer f.Close()
w := bufio.NewWriter(f)
enc := json.NewEncoder(w)
enc.SetEscapeHTML(false)
enc.SetIndent("", "\t")
err = enc.Encode(r)
err = errors.Join(err, w.Flush())
slog.InfoContext(ctx, "marshaled", slog.String("path", p))
return err
}
// mergesparks combines loaded sparks with their per-group effect rows,
// filling in each Spark's Effects field. Like load's results, the returned
// function is lazy: it waits for both inputs before merging, and returns
// the first load error if either failed.
func mergesparks(sparks func() ([]horse.Spark, error), effs func() ([]SparkEffImm, error)) func() ([]horse.Spark, error) {
	return func() ([]horse.Spark, error) {
		sp, err := sparks()
		if err != nil {
			return nil, err
		}
		ef, err := effs()
		if err != nil {
			return nil, err
		}
		// Spark effects are sorted by group ID, but groups apply to multiple
		// sparks, and we don't rely on sparks and groups being in the same order.
		// It is possible to merge in linear time, but not worth the effort:
		// n log n is fine since this is an AOT step.
		for i := range sp {
			// Binary search lands on some effect row in the spark's group
			// (not necessarily the first, since groups repeat).
			k, ok := slices.BinarySearchFunc(ef, sp[i].Group, func(e SparkEffImm, v horse.SparkGroupID) int { return cmp.Compare(e.Group, v) })
			if !ok {
				panic(fmt.Errorf("mergesparks: no spark group for %+v", &sp[i]))
			}
			// Back up to the first effect in the group.
			for k > 0 && ef[k-1].Group == sp[i].Group {
				k--
			}
			// Map effect IDs to the lists of their effects.
			m := make(map[int][]horse.SparkEffect)
			for _, e := range ef[k:] {
				if e.Group != sp[i].Group {
					// Done with this group.
					break
				}
				m[e.Effect] = append(m[e.Effect], horse.SparkEffect{Target: e.Target, Value1: e.Value1, Value2: e.Value2})
			}
			// Now get effects in order.
			keys := slices.Sorted(maps.Keys(m))
			sp[i].Effects = make([][]horse.SparkEffect, 0, len(keys))
			for _, key := range keys {
				sp[i].Effects = append(sp[i].Effects, m[key])
			}
		}
		return sp, nil
	}
}
// SparkEffImm is the intermediate row shape read from sql/spark-effect.sql,
// before effects are grouped onto their sparks by mergesparks.
type SparkEffImm struct {
	Group  horse.SparkGroupID // factor_group_id
	Effect int                // effect_id; ordering key within the group
	Target horse.SparkTarget  // target_type
	Value1 int32              // value_1
	Value2 int32              // value_2
}
// trimAbilities drops trailing unset abilities (those with a zero Type)
// from s and returns the shortened slice.
func trimAbilities(s []horse.Ability) []horse.Ability {
	n := len(s)
	for n > 0 {
		if s[n-1].Type != 0 {
			break
		}
		n--
	}
	return s[:n]
}
// trimActivations drops trailing unset activations (those with an empty
// Condition) from s and returns the shortened slice.
func trimActivations(s []horse.Activation) []horse.Activation {
	n := len(s)
	for n > 0 {
		if s[n-1].Condition != "" {
			break
		}
		n--
	}
	return s[:n]
}
// trimZeros drops trailing zero values from s and returns the result.
func trimZeros[T comparable](s ...T) []T {
	var zero T
	r := s
	for len(r) != 0 {
		if r[len(r)-1] != zero {
			break
		}
		r = r[:len(r)-1]
	}
	return r
}

114126
horse/global/affinity.json Normal file

File diff suppressed because it is too large Load Diff

214
horse/global/character.json Normal file
View File

@@ -0,0 +1,214 @@
[
{
"chara_id": 1001,
"name": "Special Week"
},
{
"chara_id": 1002,
"name": "Silence Suzuka"
},
{
"chara_id": 1003,
"name": "Tokai Teio"
},
{
"chara_id": 1004,
"name": "Maruzensky"
},
{
"chara_id": 1005,
"name": "Fuji Kiseki"
},
{
"chara_id": 1006,
"name": "Oguri Cap"
},
{
"chara_id": 1007,
"name": "Gold Ship"
},
{
"chara_id": 1008,
"name": "Vodka"
},
{
"chara_id": 1009,
"name": "Daiwa Scarlet"
},
{
"chara_id": 1010,
"name": "Taiki Shuttle"
},
{
"chara_id": 1011,
"name": "Grass Wonder"
},
{
"chara_id": 1012,
"name": "Hishi Amazon"
},
{
"chara_id": 1013,
"name": "Mejiro McQueen"
},
{
"chara_id": 1014,
"name": "El Condor Pasa"
},
{
"chara_id": 1015,
"name": "T.M. Opera O"
},
{
"chara_id": 1016,
"name": "Narita Brian"
},
{
"chara_id": 1017,
"name": "Symboli Rudolf"
},
{
"chara_id": 1018,
"name": "Air Groove"
},
{
"chara_id": 1019,
"name": "Agnes Digital"
},
{
"chara_id": 1020,
"name": "Seiun Sky"
},
{
"chara_id": 1021,
"name": "Tamamo Cross"
},
{
"chara_id": 1022,
"name": "Fine Motion"
},
{
"chara_id": 1023,
"name": "Biwa Hayahide"
},
{
"chara_id": 1024,
"name": "Mayano Top Gun"
},
{
"chara_id": 1025,
"name": "Manhattan Cafe"
},
{
"chara_id": 1026,
"name": "Mihono Bourbon"
},
{
"chara_id": 1027,
"name": "Mejiro Ryan"
},
{
"chara_id": 1028,
"name": "Hishi Akebono"
},
{
"chara_id": 1030,
"name": "Rice Shower"
},
{
"chara_id": 1032,
"name": "Agnes Tachyon"
},
{
"chara_id": 1033,
"name": "Admire Vega"
},
{
"chara_id": 1034,
"name": "Inari One"
},
{
"chara_id": 1035,
"name": "Winning Ticket"
},
{
"chara_id": 1037,
"name": "Eishin Flash"
},
{
"chara_id": 1038,
"name": "Curren Chan"
},
{
"chara_id": 1039,
"name": "Kawakami Princess"
},
{
"chara_id": 1040,
"name": "Gold City"
},
{
"chara_id": 1041,
"name": "Sakura Bakushin O"
},
{
"chara_id": 1044,
"name": "Sweep Tosho"
},
{
"chara_id": 1045,
"name": "Super Creek"
},
{
"chara_id": 1046,
"name": "Smart Falcon"
},
{
"chara_id": 1048,
"name": "Tosen Jordan"
},
{
"chara_id": 1050,
"name": "Narita Taishin"
},
{
"chara_id": 1051,
"name": "Nishino Flower"
},
{
"chara_id": 1052,
"name": "Haru Urara"
},
{
"chara_id": 1056,
"name": "Matikanefukukitaru"
},
{
"chara_id": 1058,
"name": "Meisho Doto"
},
{
"chara_id": 1059,
"name": "Mejiro Dober"
},
{
"chara_id": 1060,
"name": "Nice Nature"
},
{
"chara_id": 1061,
"name": "King Halo"
},
{
"chara_id": 1068,
"name": "Kitasan Black"
},
{
"chara_id": 1069,
"name": "Sakura Chiyono O"
},
{
"chara_id": 1071,
"name": "Mejiro Ardan"
}
]

1712
horse/global/race.json Normal file

File diff suppressed because it is too large Load Diff

1420
horse/global/saddle.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,12 @@
[
{
"scenario_id": 1,
"name": "URA Finale",
"title": "The Beginning: URA Finale"
},
{
"scenario_id": 2,
"name": "Unity Cup",
"title": "Unity Cup: Shine On, Team Spirit!"
}
]

File diff suppressed because it is too large Load Diff

View File

@@ -13521,7 +13521,7 @@ var SkillNameToID = map[string]SkillID{
"Carnival Bonus": 1000011,
}
var SkillGroups = map[int32][4]SkillID{
var SkillGroups = map[SkillGroupID][4]SkillID{
1007: {SkillWarningShot},
1008: {SkillXceleration},
1009: {SkillRedAce},

15756
horse/global/skill.json Normal file

File diff suppressed because it is too large Load Diff

35564
horse/global/spark.json Normal file

File diff suppressed because it is too large Load Diff

1634
horse/global/uma.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -4,26 +4,26 @@ type RaceID int32
// Race is the internal data about a race.
type Race struct {
ID RaceID
Name string
Thumbnail int
ID RaceID `json:"race_id"`
Name string `json:"name"`
Thumbnail int `json:"thumbnail"`
// Some careers contain unusual versions of races, e.g. Tenno Sho (Spring)
// in Hanshin instead of Kyoto for Narita Taishin and Biwa Hayahide.
// For such races, this field holds the normal race ID.
Primary RaceID
Primary RaceID `json:"primary"`
}
type SaddleID int32
// Saddle is the internal data about a race win saddle.
type Saddle struct {
ID SaddleID
Name string
Races []RaceID
Type SaddleType
ID SaddleID `json:"saddle_id"`
Name string `json:"name"`
Races []RaceID `json:"races"`
Type SaddleType `json:"type"`
// Saddles that involve alternate races are themselves alternate.
// For such saddles, this field holds the normal saddle ID.
Primary SaddleID
Primary SaddleID `json:"primary"`
}
type SaddleType int8
@@ -40,7 +40,7 @@ type ScenarioID int8
// Scenario is metadata about a career scenario.
type Scenario struct {
ID ScenarioID
Name string
Title string
ID ScenarioID `json:"scenario_id"`
Name string `json:"name"`
Title string `json:"title"`
}

View File

@@ -27,37 +27,37 @@ func (x TenThousandths) String() string {
// Skill is the internal data about a skill.
type Skill struct {
ID SkillID
Name string
Description string
Group int32
Rarity int8
GroupRate int8
GradeValue int32
WitCheck bool
Activations []Activation
UniqueOwner string
SPCost int
IconID int
ID SkillID `json:"skill_id"`
Name string `json:"name"`
Description string `json:"description"`
Group SkillGroupID `json:"group"`
Rarity int8 `json:"rarity"`
GroupRate int8 `json:"group_rate"`
GradeValue int32 `json:"grade_value,omitzero"`
WitCheck bool `json:"wit_check"`
Activations []Activation `json:"activations"`
UniqueOwner string `json:"unique_owner,omitzero"`
SPCost int `json:"sp_cost,omitzero"`
IconID int `json:"icon_id"`
}
// Activation is the parameters controlling when a skill activates.
type Activation struct {
Precondition string
Condition string
Duration TenThousandths
DurScale DurScale
Cooldown TenThousandths
Abilities []Ability
Precondition string `json:"precondition,omitzero"`
Condition string `json:"condition"`
Duration TenThousandths `json:"duration,omitzero"`
DurScale DurScale `json:"dur_scale,omitzero"`
Cooldown TenThousandths `json:"cooldown,omitzero"`
Abilities []Ability `json:"abilities"`
}
// Ability is an individual effect applied by a skill.
type Ability struct {
Type AbilityType
ValueUsage AbilityValueUsage
Value TenThousandths
Target AbilityTarget
TargetValue int32
Type AbilityType `json:"type"`
ValueUsage AbilityValueUsage `json:"value_usage"`
Value TenThousandths `json:"value"`
Target AbilityTarget `json:"target"`
TargetValue int32 `json:"target_value"`
}
func (a Ability) String() string {
@@ -202,3 +202,26 @@ const (
TargetCharacter AbilityTarget = 22 // specific character
TargetTriggering AbilityTarget = 23 // whosoever triggered this skill
)
type SkillGroupID int32
// SkillGroup is a group of skills which are alternate versions of each other.
//
// Any of the skill IDs in a group may be zero, indicating that there is no
// skill with the corresponding group rate.
// Some skill groups contain only Skill2 or SkillBad, while others may have all
// four skills.
type SkillGroup struct {
ID SkillGroupID `json:"skill_group"`
// Skill1 is the base version of the skill, either a common (white) skill
// or an Uma's own unique.
Skill1 SkillID `json:"skill1,omitzero"`
// Skill2 is the first upgraded version of the skill: a rare (gold)
// skill, a double circle skill, or an inherited unique skill.
Skill2 SkillID `json:"skill2,omitzero"`
// Skill3 is the highest upgraded version, a gold version of a skill with
// a double circle version.
Skill3 SkillID `json:"skill3,omitzero"`
// SkillBad is a negative (purple) skill.
SkillBad SkillID `json:"skill_bad,omitzero"`
}

View File

@@ -6,13 +6,13 @@ type (
)
type Spark struct {
ID SparkID
Name string
Description string
Group SparkGroupID
Rarity SparkRarity
Type SparkType
Effects [][]SparkEffect
ID SparkID `json:"spark_id"`
Name string `json:"name"`
Description string `json:"description"`
Group SparkGroupID `json:"spark_group"`
Rarity SparkRarity `json:"rarity"`
Type SparkType `json:"type"`
Effects [][]SparkEffect `json:"effects"`
}
type SparkType int8
@@ -46,9 +46,9 @@ func (r SparkRarity) String() string {
}
type SparkEffect struct {
Target SparkTarget
Value1 int32
Value2 int32
Target SparkTarget `json:"target"`
Value1 int32 `json:"value1,omitzero"`
Value2 int32 `json:"value2,omitzero"`
}
type SparkTarget int8

60
horse/sql/affinity.sql Normal file
View File

@@ -0,0 +1,60 @@
-- Compute base succession affinity for every pair and trio of characters
-- that participate in at least one succession relation: sum relation_point
-- over all relation types shared by the pair (or trio). Pair rows carry
-- id_c = 0. Output columns: id_a, id_b, id_c, base_affinity.
WITH pairs AS (
	SELECT
		a.id AS id_a,
		b.id AS id_b
	FROM chara_data a
	JOIN chara_data b ON a.id < b.id
	-- Exclude characters who have no succession relations defined.
	WHERE a.id IN (SELECT chara_id FROM succession_relation_member)
		AND b.id IN (SELECT chara_id FROM succession_relation_member)
), trios AS (
	SELECT
		a.id AS id_a,
		b.id AS id_b,
		c.id AS id_c
	FROM chara_data a
	JOIN chara_data b ON a.id < b.id
	JOIN chara_data c ON a.id < c.id AND b.id < c.id
	-- Exclude characters who have no succession relations defined.
	WHERE a.id IN (SELECT chara_id FROM succession_relation_member)
		AND b.id IN (SELECT chara_id FROM succession_relation_member)
		AND c.id IN (SELECT chara_id FROM succession_relation_member)
), pair_relations AS (
	-- Relation types shared by two members.
	SELECT
		ra.relation_type,
		ra.chara_id AS id_a,
		rb.chara_id AS id_b
	FROM succession_relation_member ra
	JOIN succession_relation_member rb ON ra.relation_type = rb.relation_type
), trio_relations AS (
	-- Relation types shared by three members.
	SELECT
		ra.relation_type,
		ra.chara_id AS id_a,
		rb.chara_id AS id_b,
		rc.chara_id AS id_c
	FROM succession_relation_member ra
	JOIN succession_relation_member rb ON ra.relation_type = rb.relation_type
	JOIN succession_relation_member rc ON ra.relation_type = rc.relation_type
), affinity AS (
	SELECT
		pairs.*,
		0 AS id_c,
		SUM(IFNULL(relation_point, 0)) AS base_affinity
	FROM pairs
	LEFT JOIN pair_relations rp ON pairs.id_a = rp.id_a AND pairs.id_b = rp.id_b
	LEFT JOIN succession_relation sr ON rp.relation_type = sr.relation_type
	GROUP BY pairs.id_a, pairs.id_b
	UNION ALL
	SELECT
		trios.*,
		SUM(IFNULL(relation_point, 0)) AS base_affinity
	FROM trios
	LEFT JOIN trio_relations rt ON trios.id_a = rt.id_a AND trios.id_b = rt.id_b AND trios.id_c = rt.id_c
	LEFT JOIN succession_relation sr ON rt.relation_type = sr.relation_type
	GROUP BY trios.id_a, trios.id_b, trios.id_c
)
SELECT * FROM affinity
WHERE base_affinity != 0
ORDER BY id_a, id_b, id_c

9
horse/sql/character.sql Normal file
View File

@@ -0,0 +1,9 @@
-- Characters: id and name from the text_data string table (category 6 is
-- character names; indices 1000-1999 are character IDs).
-- Column order (id, name) is read positionally by generate.go.
SELECT
	"index" AS "id",
	"text" AS "name",
	ROW_NUMBER() OVER (ORDER BY "index") - 1 AS "index"
FROM text_data
WHERE category = 6 AND "index" BETWEEN 1000 AND 1999
	-- Exclude characters who have no succession relations defined.
	AND "index" IN (SELECT chara_id FROM succession_relation_member)
ORDER BY "id"

14
horse/sql/race.sql Normal file
View File

@@ -0,0 +1,14 @@
-- Races: named race instances in race group 1. "primary" is the lowest race
-- ID sharing the same name, so unusual career variants of a race point back
-- at the normal one. Column order is read positionally by generate.go.
WITH race_names AS (
	-- text_data category 33 holds race names.
	SELECT "index" AS id, "text" AS name FROM text_data WHERE category = 33
)
SELECT
	race.id,
	race_names.name,
	race.grade,
	race.thumbnail_id,
	MIN(race.id) OVER (PARTITION BY race_names.name) AS "primary",
	ROW_NUMBER() OVER (PARTITION BY race_names.name ORDER BY race.id) - 1 AS "alternate"
FROM race
JOIN race_names ON race.id = race_names.id
WHERE race."group" = 1
ORDER BY race.id

20
horse/sql/saddle.sql Normal file
View File

@@ -0,0 +1,20 @@
-- Win saddles: each saddle names up to three races (missing ones become 0,
-- trimmed away by generate.go). "primary" is the lowest saddle ID sharing
-- the same name, mirroring the race query's handling of career variants.
WITH saddle_names AS (
	-- text_data category 111 holds saddle names.
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 111
)
SELECT
	s.id,
	n.name,
	ri1.id AS race1,
	IFNULL(ri2.id, 0) AS race2,
	IFNULL(ri3.id, 0) AS race3,
	s.win_saddle_type,
	MIN(s.id) OVER (PARTITION BY n.name) AS "primary",
	ROW_NUMBER() OVER (PARTITION BY n.name ORDER BY s.id) - 1 AS "alternate"
FROM single_mode_wins_saddle s
JOIN race_instance ri1 ON s.race_instance_id_1 = ri1.id
LEFT JOIN race_instance ri2 ON s.race_instance_id_2 = ri2.id
LEFT JOIN race_instance ri3 ON s.race_instance_id_3 = ri3.id
LEFT JOIN saddle_names n ON s.id = n.id
ORDER BY s.id

17
horse/sql/scenario.sql Normal file
View File

@@ -0,0 +1,17 @@
-- Career scenarios: id, short name, and full title, joined from two
-- text_data categories. Column order is read positionally by generate.go.
WITH scenario_name AS (
	-- text_data category 237 holds scenario short names.
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 237
), scenario_title AS (
	-- text_data category 119 holds scenario full titles.
	SELECT "index" AS id, "text" AS title
	FROM text_data
	WHERE category = 119
)
SELECT
	sc.id,
	n.name,
	t.title
FROM single_mode_scenario sc
JOIN scenario_name n ON sc.id = n.id
JOIN scenario_title t ON sc.id = t.id
ORDER BY sc.id

15
horse/sql/skill-group.sql Normal file
View File

@@ -0,0 +1,15 @@
-- Skill groups: for each distinct group, the skill at each group_rate tier
-- (1 = base, 2 = first upgrade, 3 = highest upgrade, -1 = negative skill).
-- Absent tiers become 0. Column order is read positionally by generate.go.
WITH skill_groups AS (
	SELECT DISTINCT group_id FROM skill_data
)
SELECT
	g.group_id,
	IFNULL(s1.id, 0) AS skill1,
	IFNULL(s2.id, 0) AS skill2,
	IFNULL(s3.id, 0) AS skill3,
	IFNULL(m1.id, 0) AS skill_bad
FROM skill_groups g
LEFT JOIN skill_data s1 ON g.group_id = s1.group_id AND s1.group_rate = 1
LEFT JOIN skill_data s2 ON g.group_id = s2.group_id AND s2.group_rate = 2
LEFT JOIN skill_data s3 ON g.group_id = s3.group_id AND s3.group_rate = 3
LEFT JOIN skill_data m1 ON g.group_id = m1.group_id AND m1.group_rate = -1
ORDER BY g.group_id

98
horse/sql/skill.sql Normal file
View File

@@ -0,0 +1,98 @@
-- Skills: names and descriptions, group membership (inherited uniques take
-- their source skill's group), the two activations of three abilities each,
-- SP cost, and the owning card of unique skills. Column positions are read
-- positionally by generate.go, so do not reorder this SELECT list.
WITH skill_names AS (
	-- text_data categories 47/48 hold skill names and descriptions.
	SELECT
		n."index" AS "id",
		n."text" AS "name",
		d."text" AS "description"
	FROM text_data n
	JOIN text_data d ON n."index" = d."index" AND n."category" = 47 AND d."category" = 48
), skill_groups AS (
	-- Name each group after its base (group_rate = 1) skill.
	SELECT
		group_id,
		name
	FROM skill_data d
	JOIN skill_names n ON d.id = n.id
	WHERE group_rate = 1
), card_name AS (
	-- text_data category 4 holds card (uma) names.
	SELECT
		"index" AS "id",
		"text" AS "name"
	FROM text_data n
	WHERE category = 4
), card_unique AS (
	-- Map each unique skill to the card that owns it.
	SELECT DISTINCT
		ss.skill_id1 AS unique_id,
		card_name.id AS owner_id,
		card_name.name
	FROM card_data card
	JOIN card_name ON card.id = card_name.id
	JOIN card_rarity_data rd ON card.id = rd.card_id
	JOIN skill_set ss ON rd.skill_set = ss.id
)
SELECT
	d.id,
	n.name,
	n.description,
	IIF(d.unique_skill_id_1 = 0, d.group_id, ud.group_id) AS group_id,
	CASE
		WHEN g.name IS NOT NULL THEN g.name
		WHEN d.unique_skill_id_1 != 0 THEN n.name
		ELSE ''
	END AS group_name,
	d.rarity,
	d.group_rate,
	d.grade_value,
	d.activate_lot,
	d.precondition_1,
	d.condition_1,
	d.float_ability_time_1,
	d.ability_time_usage_1,
	d.float_cooldown_time_1,
	d.ability_type_1_1,
	d.ability_value_usage_1_1,
	d.float_ability_value_1_1,
	d.target_type_1_1,
	d.target_value_1_1,
	d.ability_type_1_2,
	d.ability_value_usage_1_2,
	d.float_ability_value_1_2,
	d.target_type_1_2,
	d.target_value_1_2,
	d.ability_type_1_3,
	d.ability_value_usage_1_3,
	d.float_ability_value_1_3,
	d.target_type_1_3,
	d.target_value_1_3,
	d.precondition_2,
	d.condition_2,
	d.float_ability_time_2,
	d.ability_time_usage_2,
	d.float_cooldown_time_2,
	d.ability_type_2_1,
	d.ability_value_usage_2_1,
	d.float_ability_value_2_1,
	d.target_type_2_1,
	d.target_value_2_1,
	d.ability_type_2_2,
	d.ability_value_usage_2_2,
	d.float_ability_value_2_2,
	d.target_type_2_2,
	d.target_value_2_2,
	d.ability_type_2_3,
	d.ability_value_usage_2_3,
	d.float_ability_value_2_3,
	d.target_type_2_3,
	d.target_value_2_3,
	IFNULL(p.need_skill_point, 0) AS sp_cost,
	d.unique_skill_id_1,
	COALESCE(u.owner_id, iu.owner_id, 0) AS unique_owner_id,
	COALESCE(u.name, iu.name, '') AS unique_owner,
	d.icon_id,
	ROW_NUMBER() OVER (ORDER BY d.id) - 1 AS "index"
FROM skill_data d
JOIN skill_names n ON d.id = n.id
LEFT JOIN skill_data ud ON d.unique_skill_id_1 = ud.id
LEFT JOIN skill_groups g ON d.group_id = g.group_id
LEFT JOIN single_mode_skill_need_point p ON d.id = p.id
LEFT JOIN card_unique u ON d.id = u.unique_id
LEFT JOIN card_unique iu ON d.unique_skill_id_1 = iu.unique_id
ORDER BY d.id

View File

@@ -0,0 +1,9 @@
-- Spark effects: raw effect rows keyed by factor group, loaded into
-- SparkEffImm and grouped onto sparks by mergesparks (which relies on the
-- factor_group_id ordering here). Column order is read positionally.
SELECT
	factor_group_id,
	effect_id,
	target_type,
	value_1,
	value_2
FROM succession_factor_effect
WHERE factor_group_id NOT IN (40001) -- exclude Carnival Bonus
ORDER BY factor_group_id, effect_id, id

20
horse/sql/spark.sql Normal file
View File

@@ -0,0 +1,20 @@
-- Sparks (succession factors): id, name, optional description, group,
-- rarity, and type. Column order is read positionally by generate.go.
WITH spark AS (
	-- text_data category 147 holds spark names; 172 holds descriptions,
	-- which not every spark has (hence the LEFT JOIN).
	SELECT
		n."index" AS "id",
		n."text" AS "name",
		d."text" AS "description"
	FROM text_data n
	LEFT JOIN text_data d ON n."index" = d."index" AND d."category" = 172
	WHERE n.category = 147
)
SELECT
	sf.factor_id,
	spark.name,
	spark.description,
	sf.factor_group_id,
	sf.rarity,
	sf.factor_type
FROM spark
JOIN succession_factor sf ON spark.id = sf.factor_id
WHERE sf.factor_type != 7 -- exclude Carnival Bonus
ORDER BY sf.factor_id

59
horse/sql/uma.sql Normal file
View File

@@ -0,0 +1,59 @@
-- Umas: rarity-5 card data with aptitudes and skill set. Column order is
-- read positionally by generate.go; note that chara_name occupies column
-- index 4, so the aptitude columns start at index 5.
WITH uma_name AS (
	-- text_data category 4 holds card names.
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 4
), uma_variant AS (
	-- text_data category 5 holds card variant (outfit) names.
	SELECT "index" AS id, "text" AS variant
	FROM text_data
	WHERE category = 5
), chara_name AS (
	-- text_data category 6 holds character names.
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 6
), skills AS (
	-- Number each card's skills within its skill set and rank so the three
	-- rank-0 skills can be picked out individually below.
	SELECT
		uma.id,
		s.skill_id,
		s.need_rank,
		ROW_NUMBER() OVER (PARTITION BY s.available_skill_set_id, s.need_rank) AS idx
	FROM card_data uma
	LEFT JOIN available_skill_set s ON uma.available_skill_set_id = s.available_skill_set_id
)
SELECT
	uma.card_id,
	card_data.chara_id,
	n.name,
	v.variant,
	c.name AS chara_name,
	uma.proper_distance_short,
	uma.proper_distance_mile,
	uma.proper_distance_middle,
	uma.proper_distance_long,
	uma.proper_running_style_nige,
	uma.proper_running_style_senko,
	uma.proper_running_style_sashi,
	uma.proper_running_style_oikomi,
	uma.proper_ground_turf,
	uma.proper_ground_dirt,
	su.skill_id1 AS unique_skill,
	s1.skill_id AS skill1,
	s2.skill_id AS skill2,
	s3.skill_id AS skill3,
	sp2.skill_id AS skill_pl2,
	sp3.skill_id AS skill_pl3,
	sp4.skill_id AS skill_pl4,
	sp5.skill_id AS skill_pl5
FROM card_data
JOIN card_rarity_data uma ON card_data.id = uma.card_id
JOIN chara_name c ON card_data.chara_id = c.id
JOIN skill_set su ON uma.skill_set = su.id
JOIN skills s1 ON uma.card_id = s1.id AND s1.need_rank = 0 AND s1.idx = 1
JOIN skills s2 ON uma.card_id = s2.id AND s2.need_rank = 0 AND s2.idx = 2
JOIN skills s3 ON uma.card_id = s3.id AND s3.need_rank = 0 AND s3.idx = 3
JOIN skills sp2 ON uma.card_id = sp2.id AND sp2.need_rank = 2
JOIN skills sp3 ON uma.card_id = sp3.id AND sp3.need_rank = 3
JOIN skills sp4 ON uma.card_id = sp4.id AND sp4.need_rank = 4
JOIN skills sp5 ON uma.card_id = sp5.id AND sp5.need_rank = 5
LEFT JOIN uma_name n ON uma.card_id = n.id
LEFT JOIN uma_variant v ON uma.card_id = v.id
WHERE uma.rarity = 5

View File

@@ -3,18 +3,30 @@ package horse
type UmaID int32
type Uma struct {
ID UmaID
CharacterID CharacterID
Name string
Variant string
ID UmaID `json:"chara_card_id"`
CharacterID CharacterID `json:"chara_id"`
Name string `json:"name"`
Variant string `json:"variant"`
Sprint, Mile, Medium, Long AptitudeLevel
Front, Pace, Late, End AptitudeLevel
Turf, Dirt AptitudeLevel
Sprint AptitudeLevel `json:"sprint"`
Mile AptitudeLevel `json:"mile"`
Medium AptitudeLevel `json:"medium"`
Long AptitudeLevel `json:"long"`
Front AptitudeLevel `json:"front"`
Pace AptitudeLevel `json:"pace"`
Late AptitudeLevel `json:"late"`
End AptitudeLevel `json:"end"`
Turf AptitudeLevel `json:"turf"`
Dirt AptitudeLevel `json:"dirt"`
Unique SkillID
Skill1, Skill2, Skill3 SkillID
SkillPL2, SkillPL3, SkillPL4, SkillPL5 SkillID
Unique SkillID `json:"unique"`
Skill1 SkillID `json:"skill1"`
Skill2 SkillID `json:"skill2"`
Skill3 SkillID `json:"skill3"`
SkillPL2 SkillID `json:"skill_pl2"`
SkillPL3 SkillID `json:"skill_pl3"`
SkillPL4 SkillID `json:"skill_pl4"`
SkillPL5 SkillID `json:"skill_pl5"`
}
type AptitudeLevel int8

View File

@@ -71,7 +71,7 @@ var SkillNameToID = map[string]SkillID{
{{- end }}
}
var SkillGroups = map[int32][4]SkillID{
var SkillGroups = map[SkillGroupID][4]SkillID{
{{- range $g := $.Groups }}
{{ $g.ID }}: { {{- range $s := index $.Related $g.ID }}Skill{{ goenum $s.Name }}{{ if ne $s.InheritID 0 }}Inherit{{ end }}, {{ end -}} },
{{- end }}