Merge pull request #5 from nevir/nevir/file-by-file
Add a --split output mode (one file per asset)
Vilsol authored Apr 5, 2020
2 parents e8ecc4b + 3c73c70 commit 2471d81
Showing 6 changed files with 74 additions and 45 deletions.
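
A hypothetical invocation of the new mode (the extract subcommand and its flags are defined in cmd/extract.go below; the binary name ue4pak and the asset glob are illustrative placeholders, and selecting the source .pak is handled by the root command's existing options, which this diff does not touch):

    ue4pak extract --assets "FactoryGame/Content/*" --format json --split --output out/

With --split, --output names a directory and each matched asset is written to its own out/<asset path>.json file; without it, behavior is unchanged and all results are still collected into a single output file.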
15 changes: 7 additions & 8 deletions cmd/class-tree.go
@@ -3,13 +3,14 @@ package cmd
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"

"github.com/Vilsol/ue4pak/parser"
"github.com/fatih/color"
"github.com/gobwas/glob"
"github.com/spf13/cobra"
"io/ioutil"
"os"
"path/filepath"
)

func init() {
@@ -55,16 +56,14 @@ var classTreeCmd = &cobra.Command{
}

p := parser.NewParser(file)
things, _ := p.ProcessPak(nil)

for _, thing := range things {
for _, export := range thing.Exports {
p.ProcessPak(nil, func(_ string, entry *parser.PakEntrySet, _ *parser.PakFile) {
for _, export := range entry.Exports {
open.WriteString(fmt.Sprintf("Class: %s%s\n", trim(export.Export.ObjectName), BuildClassTree(export.Export.ClassIndex)))
open.WriteString(fmt.Sprintf("Super: %s%s\n", trim(export.Export.ObjectName), BuildSuperTree(export.Export.SuperIndex)))
open.WriteString(fmt.Sprintf("Templ: %s%s\n", trim(export.Export.ObjectName), BuildTemplateTree(export.Export.TemplateIndex)))
open.WriteString(fmt.Sprintf("Outer: %s%s\n", trim(export.Export.ObjectName), BuildOuterTree(export.Export.OuterIndex)))
}
}
})

// indent, _ := json.MarshalIndent(concreteRecipe.Exports, "", " ")
// fmt.Println(string(indent))
68 changes: 48 additions & 20 deletions cmd/extract.go
@@ -3,12 +3,14 @@ package cmd
import (
"encoding/json"
"fmt"
"github.com/Vilsol/ue4pak/parser"
"github.com/fatih/color"
"io/ioutil"
"os"
"path/filepath"

"github.com/Vilsol/ue4pak/parser"
"github.com/fatih/color"
log "github.com/sirupsen/logrus"

"github.com/gobwas/glob"
"github.com/spf13/cobra"
)
@@ -18,7 +20,8 @@ var assets *[]string
func init() {
assets = extractCmd.Flags().StringSliceP("assets", "a", []string{}, "Comma-separated list of asset paths to extract. (supports glob) (required)")
format = extractCmd.Flags().StringP("format", "f", "json", "Output format type")
output = extractCmd.Flags().StringP("output", "o", "extracted.json", "Output file")
output = extractCmd.Flags().StringP("output", "o", "extracted.json", "Output file (or directory if --split)")
split = extractCmd.Flags().Bool("split", false, "Whether output should be split into a file per asset")
pretty = extractCmd.Flags().Bool("pretty", false, "Whether to output in a pretty format")

extractCmd.MarkFlagRequired("assets")
@@ -54,40 +57,65 @@ var extractCmd = &cobra.Command{
panic(err)
}

p := parser.NewParser(file)
entrySets, _ := p.ProcessPak(func(name string) bool {
shouldProcess := func(name string) bool {
for _, pattern := range patterns {
if pattern.Match(name) {
return true
}
}

return false
}

p := parser.NewParser(file)
p.ProcessPak(shouldProcess, func(name string, entry *parser.PakEntrySet, _ *parser.PakFile) {
if *split {
destination := filepath.Join(*output, name+"."+*format)
err := os.MkdirAll(filepath.Dir(destination), 0755)
if err != nil {
panic(err)
}

log.Infof("Writing Result: %s\n", destination)
resultBytes := formatResults(entry)
err = ioutil.WriteFile(destination, resultBytes, 0644)
if err != nil {
panic(err)
}
} else {
results = append(results, entry)
}
})
}

results = append(results, entrySets...)
if !*split {
resultBytes := formatResults(results)
err = ioutil.WriteFile(*output, resultBytes, 0644)
}

var resultBytes []byte
if err != nil {
panic(err)
}
},
}

if *format == "json" {
if *pretty {
resultBytes, err = json.MarshalIndent(results, "", " ")
} else {
resultBytes, err = json.Marshal(results)
}
func formatResults(result interface{}) []byte {
var resultBytes []byte
var err error

if err != nil {
panic(err)
}
if *format == "json" {
if *pretty {
resultBytes, err = json.MarshalIndent(result, "", " ")
} else {
panic("Unknown output format: " + *format)
resultBytes, err = json.Marshal(result)
}

err = ioutil.WriteFile(*output, resultBytes, 0644)

if err != nil {
panic(err)
}
},
} else {
panic("Unknown output format: " + *format)
}

return resultBytes
}
3 changes: 2 additions & 1 deletion cmd/root.go
@@ -2,11 +2,12 @@ package cmd

import (
"fmt"
"os"

_ "github.com/Vilsol/ue4pak/parser/games/satisfactory"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"os"
)

var PakFile string
1 change: 1 addition & 0 deletions cmd/shared.go
@@ -2,4 +2,5 @@ package cmd

var format *string
var output *string
var split *bool
var pretty *bool
7 changes: 4 additions & 3 deletions cmd/test.go
@@ -2,12 +2,13 @@ package cmd

import (
"fmt"
"github.com/Vilsol/ue4pak/parser"
"github.com/fatih/color"
"os"
"path/filepath"
"strings"

"github.com/Vilsol/ue4pak/parser"
"github.com/fatih/color"

"github.com/spf13/cobra"
)

@@ -37,7 +38,7 @@ var testCmd = &cobra.Command{
}

p := parser.NewParser(file)
p.ProcessPak(nil)
p.ProcessPak(nil, nil)
/*
f, err := os.OpenFile("dump.txt", os.O_WRONLY | os.O_CREATE, 0644)
fmt.Println(err)
25 changes: 12 additions & 13 deletions parser/parser_types.go
@@ -3,11 +3,12 @@ package parser
import (
"encoding/binary"
"fmt"
"math"
"strings"

"github.com/Vilsol/ue4pak/utils"
log "github.com/sirupsen/logrus"
"github.com/spf13/viper"
"math"
"strings"
)

type PakParser struct {
@@ -35,11 +36,9 @@ func NewParser(reader PakReader) *PakParser {
}
}

func (parser *PakParser) ProcessPak(parseFile func(string) bool) ([]*PakEntrySet, *PakFile) {
func (parser *PakParser) ProcessPak(parseFile func(string) bool, handleEntry func(string, *PakEntrySet, *PakFile)) {
pak := parser.Parse()

results := make([]*PakEntrySet, 0)

summaries := make(map[string]*FPackageFileSummary, 0)

// First pass, parse summaries
@@ -54,7 +53,7 @@ func (parser *PakParser) ProcessPak(parseFile func(string) bool) ([]*PakEntrySet

if strings.HasSuffix(trimmed, "uasset") {
offset := record.FileOffset + pak.Footer.HeaderSize()
log.Infof("Reading Record: %d [%x-%x]: %s\n", j, offset, offset+record.FileSize, trimmed)
log.Infof("Reading Summary: %d [%x-%x]: %s\n", j, offset, offset+record.FileSize, trimmed)
summaries[trimmed[0:strings.Index(trimmed, ".uasset")]] = record.ReadUAsset(pak, parser)
summaries[trimmed[0:strings.Index(trimmed, ".uasset")]].Record = record
}
@@ -95,15 +94,15 @@
i++
}

results = append(results, &PakEntrySet{
ExportRecord: record,
Summary: summary,
Exports: exportSet,
})
if handleEntry != nil {
handleEntry(trimmed, &PakEntrySet{
ExportRecord: record,
Summary: summary,
Exports: exportSet,
}, pak)
}
}
}

return results, pak
}

func (parser *PakParser) Parse() *PakFile {
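
ProcessPak no longer returns an accumulated []*PakEntrySet; each parsed entry is handed to the new handleEntry callback as it is produced, which is what lets the extract command write one file per asset without holding every result in memory. A minimal sketch of calling the new API (the pak path, filter, and print statement are placeholders, and it assumes, as the commands above suggest, that an *os.File can be passed as the parser.PakReader):

package main

import (
	"fmt"
	"os"

	"github.com/Vilsol/ue4pak/parser"
)

func main() {
	// Placeholder path; assumes *os.File satisfies parser.PakReader.
	file, err := os.Open("game.pak")
	if err != nil {
		panic(err)
	}
	defer file.Close()

	p := parser.NewParser(file)

	// The first argument decides which records get parsed; passing nil parses everything.
	shouldProcess := func(name string) bool {
		return true // placeholder filter
	}

	// Entries are now streamed to the callback instead of returned as a slice.
	p.ProcessPak(shouldProcess, func(name string, entry *parser.PakEntrySet, _ *parser.PakFile) {
		fmt.Printf("%s: %d exports\n", name, len(entry.Exports))
	})
}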
