all: generate json, not code
This includes modifying horsebot to use the generated JSON, as well as moving the generator to another cmd/ directory. Remove the generated code while we're here. Koka tests still have to be updated, but it requires a JSON parser.
This commit is contained in:
403
cmd/horsegen/generate.go
Normal file
403
cmd/horsegen/generate.go
Normal file
@@ -0,0 +1,403 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"cmp"
|
||||
"context"
|
||||
_ "embed"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"maps"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
|
||||
"golang.org/x/sync/errgroup"
|
||||
"zombiezen.com/go/sqlite"
|
||||
"zombiezen.com/go/sqlite/sqlitex"
|
||||
|
||||
"git.sunturtle.xyz/zephyr/horse/horse"
|
||||
)
|
||||
|
||||
func main() {
|
||||
var (
|
||||
mdb string
|
||||
out string
|
||||
region string
|
||||
)
|
||||
flag.StringVar(&mdb, "mdb", os.ExpandEnv(`$USERPROFILE\AppData\LocalLow\Cygames\Umamusume\master\master.mdb`), "`path` to Umamusume master.mdb")
|
||||
flag.StringVar(&out, "o", `horse`, "`dir`ectory for output files")
|
||||
flag.StringVar(®ion, "region", "global", "region the database is for (global, jp)")
|
||||
flag.Parse()
|
||||
|
||||
slog.Info("open", slog.String("mdb", mdb))
|
||||
db, err := sqlitex.NewPool(mdb, sqlitex.PoolOptions{Flags: sqlite.OpenReadOnly})
|
||||
if err != nil {
|
||||
slog.Error("opening mdb", slog.String("mdb", mdb), slog.Any("err", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
stop()
|
||||
}()
|
||||
|
||||
loadgroup, ctx1 := errgroup.WithContext(ctx)
|
||||
charas := load(ctx1, loadgroup, db, "characters", characterSQL, func(s *sqlite.Stmt) horse.Character {
|
||||
return horse.Character{
|
||||
ID: horse.CharacterID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
}
|
||||
})
|
||||
aff := load(ctx1, loadgroup, db, "pair affinity", affinitySQL, func(s *sqlite.Stmt) horse.AffinityRelation {
|
||||
return horse.AffinityRelation{
|
||||
IDA: s.ColumnInt(0),
|
||||
IDB: s.ColumnInt(1),
|
||||
IDC: s.ColumnInt(2),
|
||||
Affinity: s.ColumnInt(3),
|
||||
}
|
||||
})
|
||||
umas := load(ctx1, loadgroup, db, "umas", umaSQL, func(s *sqlite.Stmt) horse.Uma {
|
||||
return horse.Uma{
|
||||
ID: horse.UmaID(s.ColumnInt(0)),
|
||||
CharacterID: horse.CharacterID(s.ColumnInt(1)),
|
||||
Name: s.ColumnText(2),
|
||||
Variant: s.ColumnText(3),
|
||||
Sprint: horse.AptitudeLevel(s.ColumnInt(4)),
|
||||
Mile: horse.AptitudeLevel(s.ColumnInt(6)),
|
||||
Medium: horse.AptitudeLevel(s.ColumnInt(7)),
|
||||
Long: horse.AptitudeLevel(s.ColumnInt(8)),
|
||||
Front: horse.AptitudeLevel(s.ColumnInt(9)),
|
||||
Pace: horse.AptitudeLevel(s.ColumnInt(10)),
|
||||
Late: horse.AptitudeLevel(s.ColumnInt(11)),
|
||||
End: horse.AptitudeLevel(s.ColumnInt(12)),
|
||||
Turf: horse.AptitudeLevel(s.ColumnInt(13)),
|
||||
Dirt: horse.AptitudeLevel(s.ColumnInt(14)),
|
||||
Unique: horse.SkillID(s.ColumnInt(15)),
|
||||
Skill1: horse.SkillID(s.ColumnInt(16)),
|
||||
Skill2: horse.SkillID(s.ColumnInt(17)),
|
||||
Skill3: horse.SkillID(s.ColumnInt(18)),
|
||||
SkillPL2: horse.SkillID(s.ColumnInt(19)),
|
||||
SkillPL3: horse.SkillID(s.ColumnInt(20)),
|
||||
SkillPL4: horse.SkillID(s.ColumnInt(21)),
|
||||
SkillPL5: horse.SkillID(s.ColumnInt(22)),
|
||||
}
|
||||
})
|
||||
sg := load(ctx1, loadgroup, db, "skill groups", skillGroupSQL, func(s *sqlite.Stmt) horse.SkillGroup {
|
||||
return horse.SkillGroup{
|
||||
ID: horse.SkillGroupID(s.ColumnInt(0)),
|
||||
Skill1: horse.SkillID(s.ColumnInt(1)),
|
||||
Skill2: horse.SkillID(s.ColumnInt(2)),
|
||||
Skill3: horse.SkillID(s.ColumnInt(3)),
|
||||
SkillBad: horse.SkillID(s.ColumnInt(4)),
|
||||
}
|
||||
})
|
||||
skills := load(ctx1, loadgroup, db, "skills", skillSQL, func(s *sqlite.Stmt) horse.Skill {
|
||||
return horse.Skill{
|
||||
ID: horse.SkillID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
Description: s.ColumnText(2),
|
||||
Group: horse.SkillGroupID(s.ColumnInt32(3)),
|
||||
Rarity: int8(s.ColumnInt(5)),
|
||||
GroupRate: int8(s.ColumnInt(6)),
|
||||
GradeValue: s.ColumnInt32(7),
|
||||
WitCheck: s.ColumnBool(8),
|
||||
Activations: trimActivations([]horse.Activation{
|
||||
{
|
||||
Precondition: s.ColumnText(9),
|
||||
Condition: s.ColumnText(10),
|
||||
Duration: horse.TenThousandths(s.ColumnInt(11)),
|
||||
DurScale: horse.DurScale(s.ColumnInt(12)),
|
||||
Cooldown: horse.TenThousandths(s.ColumnInt(13)),
|
||||
Abilities: trimAbilities([]horse.Ability{
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(14)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(15)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(16)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(17)),
|
||||
TargetValue: s.ColumnInt32(18),
|
||||
},
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(19)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(20)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(21)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(22)),
|
||||
TargetValue: s.ColumnInt32(23),
|
||||
},
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(24)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(25)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(26)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(27)),
|
||||
TargetValue: s.ColumnInt32(28),
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
Precondition: s.ColumnText(29),
|
||||
Condition: s.ColumnText(30),
|
||||
Duration: horse.TenThousandths(s.ColumnInt(31)),
|
||||
DurScale: horse.DurScale(s.ColumnInt(32)),
|
||||
Cooldown: horse.TenThousandths(s.ColumnInt(33)),
|
||||
Abilities: trimAbilities([]horse.Ability{
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(34)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(35)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(36)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(37)),
|
||||
TargetValue: s.ColumnInt32(38),
|
||||
},
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(39)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(40)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(41)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(42)),
|
||||
TargetValue: s.ColumnInt32(43),
|
||||
},
|
||||
{
|
||||
Type: horse.AbilityType(s.ColumnInt(44)),
|
||||
ValueUsage: horse.AbilityValueUsage(s.ColumnInt(45)),
|
||||
Value: horse.TenThousandths(s.ColumnInt(46)),
|
||||
Target: horse.AbilityTarget(s.ColumnInt(47)),
|
||||
TargetValue: s.ColumnInt32(48),
|
||||
},
|
||||
}),
|
||||
},
|
||||
}),
|
||||
UniqueOwner: s.ColumnText(52), // TODO(zeph): should be id, not name
|
||||
SPCost: s.ColumnInt(49),
|
||||
IconID: s.ColumnInt(53),
|
||||
}
|
||||
})
|
||||
races := load(ctx1, loadgroup, db, "races", raceSQL, func(s *sqlite.Stmt) horse.Race {
|
||||
return horse.Race{
|
||||
ID: horse.RaceID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
// TODO(zeph): grade
|
||||
Thumbnail: s.ColumnInt(3),
|
||||
Primary: horse.RaceID(s.ColumnInt(4)),
|
||||
}
|
||||
})
|
||||
saddles := load(ctx1, loadgroup, db, "saddles", saddleSQL, func(s *sqlite.Stmt) horse.Saddle {
|
||||
return horse.Saddle{
|
||||
ID: horse.SaddleID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
Races: trimZeros(
|
||||
horse.RaceID(s.ColumnInt(2)),
|
||||
horse.RaceID(s.ColumnInt(3)),
|
||||
horse.RaceID(s.ColumnInt(4)),
|
||||
),
|
||||
Type: horse.SaddleType(s.ColumnInt(5)),
|
||||
Primary: horse.SaddleID(s.ColumnInt(6)),
|
||||
}
|
||||
})
|
||||
scenarios := load(ctx1, loadgroup, db, "scenarios", scenarioSQL, func(s *sqlite.Stmt) horse.Scenario {
|
||||
return horse.Scenario{
|
||||
ID: horse.ScenarioID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
Title: s.ColumnText(2),
|
||||
}
|
||||
})
|
||||
sparks := load(ctx1, loadgroup, db, "sparks", sparkSQL, func(s *sqlite.Stmt) horse.Spark {
|
||||
return horse.Spark{
|
||||
ID: horse.SparkID(s.ColumnInt(0)),
|
||||
Name: s.ColumnText(1),
|
||||
Description: s.ColumnText(2),
|
||||
Group: horse.SparkGroupID(s.ColumnInt(3)),
|
||||
Rarity: horse.SparkRarity(s.ColumnInt(4)),
|
||||
Type: horse.SparkType(s.ColumnInt(5)),
|
||||
// Effects filled in later.
|
||||
}
|
||||
})
|
||||
sparkeffs := load(ctx1, loadgroup, db, "spark effects", sparkEffectSQL, func(s *sqlite.Stmt) SparkEffImm {
|
||||
return SparkEffImm{
|
||||
Group: horse.SparkGroupID(s.ColumnInt(0)),
|
||||
Effect: s.ColumnInt(1),
|
||||
Target: horse.SparkTarget(s.ColumnInt(2)),
|
||||
Value1: s.ColumnInt32(3),
|
||||
Value2: s.ColumnInt32(4),
|
||||
}
|
||||
})
|
||||
|
||||
if err := os.MkdirAll(filepath.Join(out, region), 0775); err != nil {
|
||||
slog.Error("create output dir", slog.Any("err", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
writegroup, ctx2 := errgroup.WithContext(ctx)
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "character.json", charas) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "affinity.json", aff) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "uma.json", umas) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "skill-group.json", sg) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "skill.json", skills) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "race.json", races) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "saddle.json", saddles) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "scenario.json", scenarios) })
|
||||
writegroup.Go(func() error { return write(ctx2, out, region, "spark.json", mergesparks(sparks, sparkeffs)) })
|
||||
if err := writegroup.Wait(); err != nil {
|
||||
slog.ErrorContext(ctx, "write", slog.Any("err", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
slog.InfoContext(ctx, "done")
|
||||
}
|
||||
|
||||
// The queries that produce each output kind, embedded from the sql
// directory next to this file. Column order in each query must match
// the column indices read by the corresponding loader in main.
var (
	//go:embed sql/character.sql
	characterSQL string
	//go:embed sql/affinity.sql
	affinitySQL string
	//go:embed sql/uma.sql
	umaSQL string
	//go:embed sql/skill-group.sql
	skillGroupSQL string
	//go:embed sql/skill.sql
	skillSQL string
	//go:embed sql/race.sql
	raceSQL string
	//go:embed sql/saddle.sql
	saddleSQL string
	//go:embed sql/scenario.sql
	scenarioSQL string
	//go:embed sql/spark.sql
	sparkSQL string
	//go:embed sql/spark-effect.sql
	sparkEffectSQL string
)
|
||||
|
||||
func load[T any](ctx context.Context, group *errgroup.Group, db *sqlitex.Pool, kind, sql string, row func(*sqlite.Stmt) T) func() ([]T, error) {
|
||||
slog.InfoContext(ctx, "load", slog.String("kind", kind))
|
||||
var r []T
|
||||
group.Go(func() error {
|
||||
conn, err := db.Take(ctx)
|
||||
defer db.Put(conn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("couldn't get connection for %s: %w", kind, err)
|
||||
}
|
||||
stmt, _, err := conn.PrepareTransient(sql)
|
||||
if err != nil {
|
||||
return fmt.Errorf("couldn't prepare statement for %s: %w", kind, err)
|
||||
}
|
||||
|
||||
for {
|
||||
ok, err := stmt.Step()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error stepping %s: %w", kind, err)
|
||||
}
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
r = append(r, row(stmt))
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return func() ([]T, error) {
|
||||
err := group.Wait()
|
||||
if err == context.Canceled {
|
||||
// After the first wait, all future ones return context.Canceled.
|
||||
// We want to be able to wait any number of times, so hide it.
|
||||
err = nil
|
||||
}
|
||||
return r, err
|
||||
}
|
||||
}
|
||||
|
||||
func write[T any](ctx context.Context, out, region, name string, v func() (T, error)) error {
|
||||
p := filepath.Join(out, region, name)
|
||||
r, err := v()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
slog.InfoContext(ctx, "write", slog.String("path", p))
|
||||
f, err := os.Create(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
w := bufio.NewWriter(f)
|
||||
enc := json.NewEncoder(w)
|
||||
enc.SetEscapeHTML(false)
|
||||
enc.SetIndent("", "\t")
|
||||
err = enc.Encode(r)
|
||||
err = errors.Join(err, w.Flush())
|
||||
slog.InfoContext(ctx, "marshaled", slog.String("path", p))
|
||||
return err
|
||||
}
|
||||
|
||||
// mergesparks combines the sparks getter with the spark-effects getter,
// returning a getter that yields the sparks with their Effects fields
// filled in: one list per distinct effect ID in the spark's group,
// ordered by effect ID. It panics if any spark references a group with
// no effects, since that means the generator's queries are out of sync.
func mergesparks(sparks func() ([]horse.Spark, error), effs func() ([]SparkEffImm, error)) func() ([]horse.Spark, error) {
	return func() ([]horse.Spark, error) {
		sp, err := sparks()
		if err != nil {
			return nil, err
		}
		ef, err := effs()
		if err != nil {
			return nil, err
		}
		// Spark effects are sorted by group ID, but groups apply to multiple
		// sparks, and we don't rely on sparks and groups being in the same order.
		// It is possible to merge in linear time, but not worth the effort:
		// n log n is fine since this is an AOT step.
		for i := range sp {
			// Binary search lands on some effect in the group, not
			// necessarily the first one.
			k, ok := slices.BinarySearchFunc(ef, sp[i].Group, func(e SparkEffImm, v horse.SparkGroupID) int { return cmp.Compare(e.Group, v) })
			if !ok {
				panic(fmt.Errorf("mergesparks: no spark group for %+v", &sp[i]))
			}
			// Back up to the first effect in the group.
			for k > 0 && ef[k-1].Group == sp[i].Group {
				k--
			}
			// Map effect IDs to the lists of their effects.
			m := make(map[int][]horse.SparkEffect)
			for _, e := range ef[k:] {
				if e.Group != sp[i].Group {
					// Done with this group.
					break
				}
				m[e.Effect] = append(m[e.Effect], horse.SparkEffect{Target: e.Target, Value1: e.Value1, Value2: e.Value2})
			}
			// Now get effects in order.
			keys := slices.Sorted(maps.Keys(m))
			sp[i].Effects = make([][]horse.SparkEffect, 0, len(keys))
			for _, key := range keys {
				sp[i].Effects = append(sp[i].Effects, m[key])
			}
		}
		return sp, nil
	}
}
|
||||
|
||||
// SparkEffImm is one row of the spark-effect query: a single effect
// entry belonging to a spark factor group, in the intermediate form
// used before mergesparks groups effects onto horse.Spark values.
type SparkEffImm struct {
	Group  horse.SparkGroupID // factor group the effect belongs to
	Effect int                // effect ID; rows sharing an ID form one effect list
	Target horse.SparkTarget
	Value1 int32
	Value2 int32
}
|
||||
|
||||
func trimAbilities(s []horse.Ability) []horse.Ability {
|
||||
for len(s) > 0 && s[len(s)-1].Type == 0 {
|
||||
s = s[:len(s)-1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func trimActivations(s []horse.Activation) []horse.Activation {
|
||||
for len(s) > 0 && s[len(s)-1].Condition == "" {
|
||||
s = s[:len(s)-1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func trimZeros[T comparable](s ...T) []T {
|
||||
var zero T
|
||||
for len(s) > 0 && s[len(s)-1] == zero {
|
||||
s = s[:len(s)-1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
60
cmd/horsegen/sql/affinity.sql
Normal file
60
cmd/horsegen/sql/affinity.sql
Normal file
@@ -0,0 +1,60 @@
|
||||
-- Base succession affinity for every character pair and trio, summing
-- relation_point over the succession relations the characters share.
-- Pairs are emitted with id_c = 0. Output columns (read by generate.go
-- in this order): id_a, id_b, id_c, base_affinity.
WITH pairs AS (
	SELECT
		a.id AS id_a,
		b.id AS id_b
	FROM chara_data a
	JOIN chara_data b ON a.id < b.id
	-- Exclude characters who have no succession relations defined.
	WHERE a.id IN (SELECT chara_id FROM succession_relation_member)
	AND b.id IN (SELECT chara_id FROM succession_relation_member)
), trios AS (
	SELECT
		a.id AS id_a,
		b.id AS id_b,
		c.id AS id_c
	FROM chara_data a
	JOIN chara_data b ON a.id < b.id
	JOIN chara_data c ON a.id < c.id AND b.id < c.id
	-- Exclude characters who have no succession relations defined.
	WHERE a.id IN (SELECT chara_id FROM succession_relation_member)
	AND b.id IN (SELECT chara_id FROM succession_relation_member)
	AND c.id IN (SELECT chara_id FROM succession_relation_member)
), pair_relations AS (
	SELECT
		ra.relation_type,
		ra.chara_id AS id_a,
		rb.chara_id AS id_b
	FROM succession_relation_member ra
	JOIN succession_relation_member rb ON ra.relation_type = rb.relation_type
), trio_relations AS (
	SELECT
		ra.relation_type,
		ra.chara_id AS id_a,
		rb.chara_id AS id_b,
		rc.chara_id AS id_c
	FROM succession_relation_member ra
	JOIN succession_relation_member rb ON ra.relation_type = rb.relation_type
	JOIN succession_relation_member rc ON ra.relation_type = rc.relation_type
), affinity AS (
	SELECT
		pairs.*,
		0 AS id_c,
		SUM(IFNULL(relation_point, 0)) AS base_affinity
	FROM pairs
	LEFT JOIN pair_relations rp ON pairs.id_a = rp.id_a AND pairs.id_b = rp.id_b
	LEFT JOIN succession_relation sr ON rp.relation_type = sr.relation_type
	GROUP BY pairs.id_a, pairs.id_b

	UNION ALL

	SELECT
		trios.*,
		SUM(IFNULL(relation_point, 0)) AS base_affinity
	FROM trios
	LEFT JOIN trio_relations rt ON trios.id_a = rt.id_a AND trios.id_b = rt.id_b AND trios.id_c = rt.id_c
	LEFT JOIN succession_relation sr ON rt.relation_type = sr.relation_type
	GROUP BY trios.id_a, trios.id_b, trios.id_c
)
SELECT * FROM affinity
WHERE base_affinity != 0
ORDER BY id_a, id_b, id_c
|
||||
9
cmd/horsegen/sql/character.sql
Normal file
9
cmd/horsegen/sql/character.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Characters: id, name, and a dense 0-based index, limited to
-- characters that participate in succession relations. text_data
-- category 6 holds character names; ids 1000-1999 are characters.
-- Output columns (read by generate.go): id, name, index.
SELECT
	"index" AS "id",
	"text" AS "name",
	ROW_NUMBER() OVER (ORDER BY "index") - 1 AS "index"
FROM text_data
WHERE category = 6 AND "index" BETWEEN 1000 AND 1999
-- Exclude characters who have no succession relations defined.
AND "index" IN (SELECT chara_id FROM succession_relation_member)
ORDER BY "id"
|
||||
14
cmd/horsegen/sql/race.sql
Normal file
14
cmd/horsegen/sql/race.sql
Normal file
@@ -0,0 +1,14 @@
|
||||
-- Races limited to race."group" = 1, with the lowest id sharing the
-- same name exposed as "primary" and a 0-based "alternate" counter per
-- name. text_data category 33 holds race names. Output columns (read
-- by generate.go): id, name, grade, thumbnail_id, primary, alternate.
WITH race_names AS (
	SELECT "index" AS id, "text" AS name FROM text_data WHERE category = 33
)
SELECT
	race.id,
	race_names.name,
	race.grade,
	race.thumbnail_id,
	MIN(race.id) OVER (PARTITION BY race_names.name) AS "primary",
	ROW_NUMBER() OVER (PARTITION BY race_names.name ORDER BY race.id) - 1 AS "alternate"
FROM race
JOIN race_names ON race.id = race_names.id
WHERE race."group" = 1
ORDER BY race.id
|
||||
20
cmd/horsegen/sql/saddle.sql
Normal file
20
cmd/horsegen/sql/saddle.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
-- Wins saddles with their up-to-three race instances (missing ones
-- reported as 0), plus a "primary"/"alternate" numbering over saddles
-- sharing a name. text_data category 111 holds saddle names. Output
-- columns (read by generate.go): id, name, race1, race2, race3,
-- win_saddle_type, primary, alternate.
WITH saddle_names AS (
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 111
)
SELECT
	s.id,
	n.name,
	ri1.id AS race1,
	IFNULL(ri2.id, 0) AS race2,
	IFNULL(ri3.id, 0) AS race3,
	s.win_saddle_type,
	MIN(s.id) OVER (PARTITION BY n.name) AS "primary",
	ROW_NUMBER() OVER (PARTITION BY n.name ORDER BY s.id) - 1 AS "alternate"
FROM single_mode_wins_saddle s
JOIN race_instance ri1 ON s.race_instance_id_1 = ri1.id
LEFT JOIN race_instance ri2 ON s.race_instance_id_2 = ri2.id
LEFT JOIN race_instance ri3 ON s.race_instance_id_3 = ri3.id
LEFT JOIN saddle_names n ON s.id = n.id
ORDER BY s.id
|
||||
17
cmd/horsegen/sql/scenario.sql
Normal file
17
cmd/horsegen/sql/scenario.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Single-mode scenarios with their names (text_data category 237) and
-- titles (category 119). Output columns (read by generate.go): id,
-- name, title.
WITH scenario_name AS (
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 237
), scenario_title AS (
	SELECT "index" AS id, "text" AS title
	FROM text_data
	WHERE category = 119
)
SELECT
	sc.id,
	n.name,
	t.title
FROM single_mode_scenario sc
JOIN scenario_name n ON sc.id = n.id
JOIN scenario_title t ON sc.id = t.id
ORDER BY sc.id
|
||||
15
cmd/horsegen/sql/skill-group.sql
Normal file
15
cmd/horsegen/sql/skill-group.sql
Normal file
@@ -0,0 +1,15 @@
|
||||
-- Skill groups with the skill at each group_rate slot (1, 2, 3, and -1
-- for the "bad" skill); missing slots are reported as 0. Output
-- columns (read by generate.go): group_id, skill1, skill2, skill3,
-- skill_bad.
WITH skill_groups AS (
	SELECT DISTINCT group_id FROM skill_data
)
SELECT
	g.group_id,
	IFNULL(s1.id, 0) AS skill1,
	IFNULL(s2.id, 0) AS skill2,
	IFNULL(s3.id, 0) AS skill3,
	IFNULL(m1.id, 0) AS skill_bad
FROM skill_groups g
LEFT JOIN skill_data s1 ON g.group_id = s1.group_id AND s1.group_rate = 1
LEFT JOIN skill_data s2 ON g.group_id = s2.group_id AND s2.group_rate = 2
LEFT JOIN skill_data s3 ON g.group_id = s3.group_id AND s3.group_rate = 3
LEFT JOIN skill_data m1 ON g.group_id = m1.group_id AND m1.group_rate = -1
ORDER BY g.group_id
|
||||
98
cmd/horsegen/sql/skill.sql
Normal file
98
cmd/horsegen/sql/skill.sql
Normal file
@@ -0,0 +1,98 @@
|
||||
-- Skills with names and descriptions (text_data categories 47 and 48),
-- group resolution (unique skills take their base skill's group), SP
-- cost, and the owning card for unique skills, followed by the full
-- two-activation / three-ability column block. Column ORDER here is
-- load-bearing: generate.go reads these by index (activations at
-- columns 9-48, sp_cost 49, unique_owner 52, icon_id 53).
WITH skill_names AS (
	SELECT
		n."index" AS "id",
		n."text" AS "name",
		d."text" AS "description"
	FROM text_data n
	JOIN text_data d ON n."index" = d."index" AND n."category" = 47 AND d."category" = 48
), skill_groups AS (
	SELECT
		group_id,
		name
	FROM skill_data d
	JOIN skill_names n ON d.id = n.id
	WHERE group_rate = 1
), card_name AS (
	SELECT
		"index" AS "id",
		"text" AS "name"
	FROM text_data n
	WHERE category = 4
), card_unique AS (
	SELECT DISTINCT
		ss.skill_id1 AS unique_id,
		card_name.id AS owner_id,
		card_name.name
	FROM card_data card
	JOIN card_name ON card.id = card_name.id
	JOIN card_rarity_data rd ON card.id = rd.card_id
	JOIN skill_set ss ON rd.skill_set = ss.id
)
SELECT
	d.id,
	n.name,
	n.description,
	IIF(d.unique_skill_id_1 = 0, d.group_id, ud.group_id) AS group_id,
	CASE
		WHEN g.name IS NOT NULL THEN g.name
		WHEN d.unique_skill_id_1 != 0 THEN n.name
		ELSE ''
	END AS group_name,
	d.rarity,
	d.group_rate,
	d.grade_value,
	d.activate_lot,
	d.precondition_1,
	d.condition_1,
	d.float_ability_time_1,
	d.ability_time_usage_1,
	d.float_cooldown_time_1,
	d.ability_type_1_1,
	d.ability_value_usage_1_1,
	d.float_ability_value_1_1,
	d.target_type_1_1,
	d.target_value_1_1,
	d.ability_type_1_2,
	d.ability_value_usage_1_2,
	d.float_ability_value_1_2,
	d.target_type_1_2,
	d.target_value_1_2,
	d.ability_type_1_3,
	d.ability_value_usage_1_3,
	d.float_ability_value_1_3,
	d.target_type_1_3,
	d.target_value_1_3,
	d.precondition_2,
	d.condition_2,
	d.float_ability_time_2,
	d.ability_time_usage_2,
	d.float_cooldown_time_2,
	d.ability_type_2_1,
	d.ability_value_usage_2_1,
	d.float_ability_value_2_1,
	d.target_type_2_1,
	d.target_value_2_1,
	d.ability_type_2_2,
	d.ability_value_usage_2_2,
	d.float_ability_value_2_2,
	d.target_type_2_2,
	d.target_value_2_2,
	d.ability_type_2_3,
	d.ability_value_usage_2_3,
	d.float_ability_value_2_3,
	d.target_type_2_3,
	d.target_value_2_3,
	IFNULL(p.need_skill_point, 0) AS sp_cost,
	d.unique_skill_id_1,
	COALESCE(u.owner_id, iu.owner_id, 0) AS unique_owner_id,
	COALESCE(u.name, iu.name, '') AS unique_owner,
	d.icon_id,
	ROW_NUMBER() OVER (ORDER BY d.id) - 1 AS "index"
FROM skill_data d
JOIN skill_names n ON d.id = n.id
LEFT JOIN skill_data ud ON d.unique_skill_id_1 = ud.id
LEFT JOIN skill_groups g ON d.group_id = g.group_id
LEFT JOIN single_mode_skill_need_point p ON d.id = p.id
LEFT JOIN card_unique u ON d.id = u.unique_id
LEFT JOIN card_unique iu ON d.unique_skill_id_1 = iu.unique_id
ORDER BY d.id
|
||||
9
cmd/horsegen/sql/spark-effect.sql
Normal file
9
cmd/horsegen/sql/spark-effect.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Spark (succession factor) effects, ordered by group then effect ID
-- so generate.go's mergesparks can binary-search by group. Output
-- columns: factor_group_id, effect_id, target_type, value_1, value_2.
SELECT
	factor_group_id,
	effect_id,
	target_type,
	value_1,
	value_2
FROM succession_factor_effect
WHERE factor_group_id NOT IN (40001) -- exclude Carnival Bonus
ORDER BY factor_group_id, effect_id, id
|
||||
20
cmd/horsegen/sql/spark.sql
Normal file
20
cmd/horsegen/sql/spark.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
-- Sparks (succession factors) with names (text_data category 147) and
-- descriptions (category 172; LEFT JOIN, so sparks without one still
-- appear). Output columns (read by generate.go): factor_id, name,
-- description, factor_group_id, rarity, factor_type.
WITH spark AS (
	SELECT
		n."index" AS "id",
		n."text" AS "name",
		d."text" AS "description"
	FROM text_data n
	LEFT JOIN text_data d ON n."index" = d."index" AND d."category" = 172
	WHERE n.category = 147
)
SELECT
	sf.factor_id,
	spark.name,
	spark.description,
	sf.factor_group_id,
	sf.rarity,
	sf.factor_type
FROM spark
JOIN succession_factor sf ON spark.id = sf.factor_id
WHERE sf.factor_type != 7 -- exclude Carnival Bonus
ORDER BY sf.factor_id
|
||||
59
cmd/horsegen/sql/uma.sql
Normal file
59
cmd/horsegen/sql/uma.sql
Normal file
@@ -0,0 +1,59 @@
|
||||
-- Rarity-5 card entries with names/variants, aptitudes, and skills:
-- the unique skill from the card's skill_set, three rank-0 skills, and
-- the one skill for each of ranks 2-5. Column ORDER is load-bearing:
-- generate.go reads by index — card_id 0, chara_id 1, name 2,
-- variant 3, chara_name 4, aptitudes starting at 5, skills from 15.
WITH uma_name AS (
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 4
), uma_variant AS (
	SELECT "index" AS id, "text" AS variant
	FROM text_data
	WHERE category = 5
), chara_name AS (
	SELECT "index" AS id, "text" AS name
	FROM text_data
	WHERE category = 6
), skills AS (
	SELECT
		uma.id,
		s.skill_id,
		s.need_rank,
		ROW_NUMBER() OVER (PARTITION BY s.available_skill_set_id, s.need_rank) AS idx
	FROM card_data uma
	LEFT JOIN available_skill_set s ON uma.available_skill_set_id = s.available_skill_set_id
)
SELECT
	uma.card_id,
	card_data.chara_id,
	n.name,
	v.variant,
	c.name AS chara_name,
	uma.proper_distance_short,
	uma.proper_distance_mile,
	uma.proper_distance_middle,
	uma.proper_distance_long,
	uma.proper_running_style_nige,
	uma.proper_running_style_senko,
	uma.proper_running_style_sashi,
	uma.proper_running_style_oikomi,
	uma.proper_ground_turf,
	uma.proper_ground_dirt,
	su.skill_id1 AS unique_skill,
	s1.skill_id AS skill1,
	s2.skill_id AS skill2,
	s3.skill_id AS skill3,
	sp2.skill_id AS skill_pl2,
	sp3.skill_id AS skill_pl3,
	sp4.skill_id AS skill_pl4,
	sp5.skill_id AS skill_pl5
FROM card_data
JOIN card_rarity_data uma ON card_data.id = uma.card_id
JOIN chara_name c ON card_data.chara_id = c.id
JOIN skill_set su ON uma.skill_set = su.id
JOIN skills s1 ON uma.card_id = s1.id AND s1.need_rank = 0 AND s1.idx = 1
JOIN skills s2 ON uma.card_id = s2.id AND s2.need_rank = 0 AND s2.idx = 2
JOIN skills s3 ON uma.card_id = s3.id AND s3.need_rank = 0 AND s3.idx = 3
JOIN skills sp2 ON uma.card_id = sp2.id AND sp2.need_rank = 2
JOIN skills sp3 ON uma.card_id = sp3.id AND sp3.need_rank = 3
JOIN skills sp4 ON uma.card_id = sp4.id AND sp4.need_rank = 4
JOIN skills sp5 ON uma.card_id = sp5.id AND sp5.need_rank = 5
LEFT JOIN uma_name n ON uma.card_id = n.id
LEFT JOIN uma_variant v ON uma.card_id = v.id
WHERE uma.rarity = 5
|
||||
Reference in New Issue
Block a user