diff --git a/parser/class.go b/parser/class.go new file mode 100644 index 0000000..372a14b --- /dev/null +++ b/parser/class.go @@ -0,0 +1,78 @@ +package parser + +import ( + log "github.com/sirupsen/logrus" + "strings" +) + +type ClassResolver func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} + +var classResolvers = map[string]ClassResolver{ + "DataTable": func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} { + return parser.ReadUDataTable(uAsset) + }, + "ObjectProperty": func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} { + // TODO Figure out + parser.Read(24) + return parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) + }, + "BoolProperty": func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} { + // TODO Figure out + parser.Read(25) + return parser.Read(1)[0] != 0 + }, + "StructProperty": func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} { + // TODO Figure out + parser.Read(24) + return parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) + }, + "DelegateProperty": func(parser *PakParser, export *FObjectExport, size int32, uAsset *FPackageFileSummary) interface{} { + // TODO Figure out + parser.Read(24) + return parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) + }, +} + +type ClassType struct { + Type string `json:"type"` + Value interface{} `json:"value"` +} + +func (parser *PakParser) ReadClass(export *FObjectExport, size int32, uAsset *FPackageFileSummary) (interface{}, bool) { + var className string + + if classNameTemp := export.TemplateIndex.ClassName(); classNameTemp != nil { + className = *classNameTemp + } else { + return nil, false + } + + trimmedType := strings.Trim(className, "\x00") + + resolver, ok := classResolvers[trimmedType] + + if !ok { + return nil, false + } + + if resolver != nil { + value := resolver(parser, export, size, uAsset) + + if value != nil { + return value, true + } + } + + // TODO Read types correctly + log.Warningf("Unread Class Type [%d]: %s", size, trimmedType) + // fmt.Println(utils.HexDump(data[offset:])) + if size > 0 { + parser.Read(size) + } + + return nil, true +} + +func RegisterClassResolver(classType string, resolver ClassResolver) { + classResolvers[classType] = resolver +} diff --git a/parser/class_data_table.go b/parser/class_data_table.go new file mode 100644 index 0000000..23960ca --- /dev/null +++ b/parser/class_data_table.go @@ -0,0 +1,24 @@ +package parser + +// https://github.com/SatisfactoryModdingUE/UnrealEngine/blob/4.22-CSS/Engine/Source/Runtime/Engine/Classes/Engine/DataTable.h#L56 +type UDataTable struct { + Values map[string][]*FPropertyTag `json:"values"` +} + +func (parser *PakParser) ReadUDataTable(uAsset *FPackageFileSummary) *UDataTable { + // Unknown + parser.Read(4) + + count := parser.ReadUint32() + + values := make(map[string][]*FPropertyTag) + + for i := uint32(0); i < count; i++ { + name := parser.ReadFName(uAsset.Names) + values[name] = parser.ReadFPropertyTagLoop(uAsset) + } + + return &UDataTable{ + Values: values, + } +} diff --git a/parser/parser.go b/parser/parser.go new file mode 100644 index 0000000..d29b6d5 --- /dev/null +++ b/parser/parser.go @@ -0,0 +1,102 @@ +package parser + +import ( + "fmt" + "github.com/spf13/viper" +) + +type PakParser struct { + reader PakReader + tracker *readTracker + preload []byte +} + +type readTracker struct { + child 
*readTracker + bytesRead int32 +} + +func (tracker *readTracker) Increment(n int32) { + tracker.bytesRead += n + + if tracker.child != nil { + tracker.child.Increment(n) + } +} + +func NewParser(reader PakReader) *PakParser { + return &PakParser{ + reader: reader, + } +} + +func (parser *PakParser) TrackRead() *readTracker { + parser.tracker = &readTracker{ + child: parser.tracker, + } + + return parser.tracker +} + +func (parser *PakParser) UnTrackRead() { + if parser.tracker != nil { + parser.tracker = parser.tracker.child + } +} + +func (parser *PakParser) Seek(offset int64, whence int) (ret int64, err error) { + parser.preload = nil + return parser.reader.Seek(offset, whence) +} + +func (parser *PakParser) Preload(n int32) { + if viper.GetBool("NoPreload") { + return + } + + buffer := make([]byte, n) + read, err := parser.reader.Read(buffer) + + if err != nil { + panic(err) + } + + if int32(read) < n { + panic(fmt.Sprintf("End of stream: %d < %d", read, n)) + } + + if parser.preload != nil && len(parser.preload) > 0 { + parser.preload = append(parser.preload, buffer...) + } else { + parser.preload = buffer + } +} + +func (parser *PakParser) Read(n int32) []byte { + toRead := n + buffer := make([]byte, toRead) + + if parser.preload != nil && len(parser.preload) > 0 { + copied := copy(buffer, parser.preload) + parser.preload = parser.preload[copied:] + toRead = toRead - int32(copied) + } + + if toRead > 0 { + read, err := parser.reader.Read(buffer[n-toRead:]) + + if err != nil { + panic(err) + } + + if int32(read) < toRead { + panic(fmt.Sprintf("End of stream: %d < %d", read, toRead)) + } + } + + if parser.tracker != nil { + parser.tracker.Increment(n) + } + + return buffer +} diff --git a/parser/parser_asset.go b/parser/parser_asset.go new file mode 100644 index 0000000..5882d36 --- /dev/null +++ b/parser/parser_asset.go @@ -0,0 +1,562 @@ +package parser + +import ( + log "github.com/sirupsen/logrus" + "strings" +) + +func (record *FPakEntry) ReadUAsset(pak *PakFile, parser *PakParser) *FPackageFileSummary { + // Skip UE4 pak header + // TODO Find out what's in the pak header + headerSize := int64(pak.Footer.HeaderSize()) + + parser.Seek(headerSize+int64(record.FileOffset), 0) + parser.Preload(int32(record.FileSize)) + + tag := parser.ReadInt32() + legacyFileVersion := parser.ReadInt32() + legacyUE3Version := parser.ReadInt32() + fileVersionUE4 := parser.ReadInt32() + fileVersionLicenseeUE4 := parser.ReadInt32() + + // TODO custom_version_container: Vec + parser.Read(4) + + totalHeaderSize := parser.ReadInt32() + folderName := parser.ReadString() + packageFlags := parser.ReadUint32() + nameCount := parser.ReadUint32() + nameOffset := parser.ReadInt32() + gatherableTextDataCount := parser.ReadInt32() + gatherableTextDataOffset := parser.ReadInt32() + exportCount := parser.ReadUint32() + exportOffset := parser.ReadInt32() + importCount := parser.ReadUint32() + importOffset := parser.ReadInt32() + dependsOffset := parser.ReadInt32() + stringAssetReferencesCount := parser.ReadInt32() + stringAssetReferencesOffset := parser.ReadInt32() + searchableNamesOffset := parser.ReadInt32() + thumbnailTableOffset := parser.ReadInt32() + guid := parser.ReadFGuid() + generationCount := parser.ReadUint32() + + generations := make([]*FGenerationInfo, generationCount) + for i := uint32(0); i < generationCount; i++ { + generations[i] = parser.ReadFGenerationInfo() + } + + savedByEngineVersion := parser.ReadFEngineVersion() + compatibleWithEngineVersion := parser.ReadFEngineVersion() + compressionFlags := 
parser.ReadUint32() + compressedChunkCount := parser.ReadUint32() + + compressedChunks := make([]*FCompressedChunk, compressedChunkCount) + for i := uint32(0); i < compressedChunkCount; i++ { + compressedChunks[i] = parser.ReadFCompressedChunk() + } + + packageSource := parser.ReadUint32() + additionalPackageCount := parser.ReadUint32() + + additionalPackagesToCook := make([]string, additionalPackageCount) + for i := uint32(0); i < additionalPackageCount; i++ { + additionalPackagesToCook[i] = parser.ReadString() + } + + assetRegistryDataOffset := parser.ReadInt32() + bulkDataStartOffset := parser.ReadInt32() + worldTileInfoDataOffset := parser.ReadInt32() + chunkCount := parser.ReadUint32() + + chunkIds := make([]int32, chunkCount) + for i := uint32(0); i < chunkCount; i++ { + chunkIds[i] = parser.ReadInt32() + } + + // TODO unknown bytes + parser.Read(4) + + preloadDependencyCount := parser.ReadInt32() + preloadDependencyOffset := parser.ReadInt32() + + names := make([]*FNameEntrySerialized, nameCount) + for i := uint32(0); i < nameCount; i++ { + names[i] = &FNameEntrySerialized{ + Name: parser.ReadString(), + NonCasePreservingHash: parser.ReadUint16(), + CasePreservingHash: parser.ReadUint16(), + } + } + + imports := make([]*FObjectImport, importCount) + for i := uint32(0); i < importCount; i++ { + imports[i] = &FObjectImport{ + ClassPackage: parser.ReadFName(names), + ClassName: parser.ReadFName(names), + OuterIndex: parser.ReadInt32(), + ObjectName: parser.ReadFName(names), + } + } + + exports := make([]*FObjectExport, exportCount) + for i := uint32(0); i < exportCount; i++ { + exports[i] = &FObjectExport{ + ClassIndex: parser.ReadFPackageIndex(imports, exports), + SuperIndex: parser.ReadFPackageIndex(imports, exports), + TemplateIndex: parser.ReadFPackageIndex(imports, exports), + OuterIndex: parser.ReadFPackageIndex(imports, exports), + ObjectName: parser.ReadFName(names), + Save: parser.ReadUint32(), + SerialSize: parser.ReadInt64(), + SerialOffset: parser.ReadInt64(), + ForcedExport: parser.ReadInt32() != 0, + NotForClient: parser.ReadInt32() != 0, + NotForServer: parser.ReadInt32() != 0, + PackageGuid: parser.ReadFGuid(), + PackageFlags: parser.ReadUint32(), + NotAlwaysLoadedForEditorGame: parser.ReadInt32() != 0, + IsAsset: parser.ReadInt32() != 0, + FirstExportDependency: parser.ReadInt32(), + SerializationBeforeSerializationDependencies: parser.ReadInt32() != 0, + CreateBeforeSerializationDependencies: parser.ReadInt32() != 0, + SerializationBeforeCreateDependencies: parser.ReadInt32() != 0, + CreateBeforeCreateDependencies: parser.ReadInt32() != 0, + } + } + + for _, objectImport := range imports { + objectImport.OuterPackage = parser.ReadFPackageIndexInt(objectImport.OuterIndex, imports, exports) + } + + // fmt.Println("UASSET LEFTOVERS:", len(fileData[offset:])) + // fmt.Println(utils.HexDump(fileData[offset:])) + + // TODO Bunch of unknown bytes at the end + + return &FPackageFileSummary{ + Tag: tag, + LegacyFileVersion: legacyFileVersion, + LegacyUE3Version: legacyUE3Version, + FileVersionUE4: fileVersionUE4, + FileVersionLicenseeUE4: fileVersionLicenseeUE4, + TotalHeaderSize: totalHeaderSize, + FolderName: folderName, + PackageFlags: packageFlags, + NameOffset: nameOffset, + GatherableTextDataCount: gatherableTextDataCount, + GatherableTextDataOffset: gatherableTextDataOffset, + ExportOffset: exportOffset, + ImportOffset: importOffset, + DependsOffset: dependsOffset, + StringAssetReferencesCount: stringAssetReferencesCount, + StringAssetReferencesOffset: 
stringAssetReferencesOffset, + SearchableNamesOffset: searchableNamesOffset, + ThumbnailTableOffset: thumbnailTableOffset, + GUID: guid, + Generations: generations, + SavedByEngineVersion: savedByEngineVersion, + CompatibleWithEngineVersion: compatibleWithEngineVersion, + CompressionFlags: compressionFlags, + CompressedChunks: compressedChunks, + PackageSource: packageSource, + AdditionalPackagesToCook: additionalPackagesToCook, + AssetRegistryDataOffset: assetRegistryDataOffset, + BulkDataStartOffset: bulkDataStartOffset, + WorldTileInfoDataOffset: worldTileInfoDataOffset, + ChunkIds: chunkIds, + PreloadDependencyCount: preloadDependencyCount, + PreloadDependencyOffset: preloadDependencyOffset, + Names: names, + Imports: imports, + Exports: exports, + } +} + +func (record *FPakEntry) ReadUExp(pak *PakFile, parser *PakParser, uAsset *FPackageFileSummary) map[*FObjectExport]*ExportData { + // Skip UE4 pak header + // TODO Find out what's in the pak header + headerSize := int64(pak.Footer.HeaderSize()) + + exports := make(map[*FObjectExport]*ExportData) + + for _, export := range uAsset.Exports { + offset := headerSize + int64(record.FileOffset) + (export.SerialOffset - int64(uAsset.TotalHeaderSize)) + log.Debugf("Reading export [%x]: %#v", offset, export.TemplateIndex.Reference) + parser.Seek(offset, 0) + + tracker := parser.TrackRead() + + properties := parser.ReadFPropertyTagLoop(uAsset) + + if int64(tracker.bytesRead) < export.SerialSize { + parser.Preload(int32(export.SerialSize - int64(tracker.bytesRead))) + } + + parser.UnTrackRead() + + var data interface{} + + if parser.preload != nil { + preloadSize := len(parser.preload) + if preloadSize > 4 { + var parsed bool + data, parsed = parser.ReadClass(export, int32(preloadSize), uAsset) + + if !parsed { + if className := export.TemplateIndex.ClassName(); className != nil { + log.Warningf("Unknown export class type (%s)[%d]: %s", strings.Trim(export.ObjectName, "\x00"), preloadSize, strings.Trim(*className, "\x00")) + } + } + } + } + + exports[export] = &ExportData{ + Properties: properties, + Data: data, + } + } + + return exports +} + +func (parser *PakParser) ReadFPropertyTag(uAsset *FPackageFileSummary, readData bool, depth int) *FPropertyTag { + name := parser.ReadFName(uAsset.Names) + + if strings.Trim(name, "\x00") == "None" { + return nil + } + + propertyType := parser.ReadFName(uAsset.Names) + size := parser.ReadInt32() + arrayIndex := parser.ReadInt32() + + log.Tracef("%sReading Property %s (%s)[%d]", d(depth), strings.Trim(name, "\x00"), strings.Trim(propertyType, "\x00"), size) + + var tagData interface{} + + switch strings.Trim(propertyType, "\x00") { + case "StructProperty": + tagData = &StructProperty{ + Type: parser.ReadFName(uAsset.Names), + Guid: parser.ReadFGuid(), + } + + log.Tracef("%sStructProperty Type: %s", d(depth), tagData.(*StructProperty).Type) + break + case "BoolProperty": + tagData = parser.Read(1)[0] != 0 + break + case "EnumProperty": + fallthrough + case "ByteProperty": + fallthrough + case "SetProperty": + fallthrough + case "ArrayProperty": + tagData = parser.ReadFName(uAsset.Names) + break + case "MapProperty": + tagData = &MapProperty{ + KeyType: parser.ReadFName(uAsset.Names), + ValueType: parser.ReadFName(uAsset.Names), + } + break + } + + hasGuid := parser.Read(1)[0] != 0 + + var propertyGuid *FGuid + + if hasGuid { + propertyGuid = parser.ReadFGuid() + } + + var tag interface{} + + if readData && size > 0 { + parser.Preload(size) + tracker := parser.TrackRead() + tag = parser.ReadTag(size, uAsset, 
propertyType, tagData, &name, depth) + + if tracker.bytesRead != size { + log.Warningf("%sProperty not read correctly %s (%s)[%#v]: %d read out of %d", + d(depth), + strings.Trim(name, "\x00"), + strings.Trim(propertyType, "\x00"), + tagData, + tracker.bytesRead, + size) + + if tracker.bytesRead > size { + log.Fatalf("More bytes read than available!") + } else { + parser.Read(size - tracker.bytesRead) + } + } + + parser.UnTrackRead() + } + + return &FPropertyTag{ + Name: name, + PropertyType: propertyType, + TagData: tagData, + Size: size, + ArrayIndex: arrayIndex, + PropertyGuid: propertyGuid, + Tag: tag, + } +} + +func (parser *PakParser) ReadTag(size int32, uAsset *FPackageFileSummary, propertyType string, tagData interface{}, name *string, depth int) interface{} { + var tag interface{} + switch strings.Trim(propertyType, "\x00") { + case "FloatProperty": + tag = parser.ReadFloat32() + break + case "ArrayProperty": + arrayTypes := strings.Trim(tagData.(string), "\x00") + valueCount := parser.ReadInt32() + + var innerTagData *FPropertyTag + + if arrayTypes == "StructProperty" { + innerTagData = parser.ReadFPropertyTag(uAsset, false, depth+1) + } + + values := make([]interface{}, valueCount) + for i := int32(0); i < valueCount; i++ { + switch arrayTypes { + case "SoftObjectProperty": + values[i] = &FSoftObjectPath{ + AssetPathName: parser.ReadFName(uAsset.Names), + SubPath: parser.ReadString(), + } + break + case "StructProperty": + log.Tracef("%sReading Array StructProperty: %s", d(depth), strings.Trim(innerTagData.TagData.(*StructProperty).Type, "\x00")) + values[i] = &ArrayStructProperty{ + InnerTagData: innerTagData, + Properties: parser.ReadTag(-1, uAsset, arrayTypes, innerTagData.TagData, nil, depth+1), + } + break + case "ObjectProperty": + values[i] = parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) + break + case "BoolProperty": + values[i] = parser.Read(1)[0] != 0 + break + case "ByteProperty": + if (size-4)/valueCount == 1 { + values[i] = parser.Read(1)[0] + } else { + values[i] = parser.ReadFName(uAsset.Names) + } + break + case "NameProperty": + fallthrough + case "EnumProperty": + values[i] = parser.ReadFName(uAsset.Names) + break + case "IntProperty": + values[i] = parser.ReadInt32() + break + case "FloatProperty": + values[i] = parser.ReadFloat32() + break + case "UInt32Property": + values[i] = parser.ReadUint32() + break + case "TextProperty": + values[i] = parser.ReadFText() + break + case "StrProperty": + values[i] = parser.ReadString() + break + case "DelegateProperty": + values[i] = &FScriptDelegate{ + Object: parser.ReadInt32(), + Name: parser.ReadFName(uAsset.Names), + } + break + default: + panic("unknown array type: " + arrayTypes) + } + } + + tag = values + + if valueCount > 0 && arrayTypes == "StructProperty" && values[0].(*ArrayStructProperty).Properties == nil { + if size > 0 { + // Struct data was not processed + parser.Read(innerTagData.Size) + } + } + + break + case "StructProperty": + if tagData == nil { + log.Trace("%sReading Generic StructProperty", d(depth)) + } else { + log.Tracef("%sReading StructProperty: %s", d(depth), strings.Trim(tagData.(*StructProperty).Type, "\x00")) + + if structData, ok := tagData.(*StructProperty); ok { + result, success := parser.ReadStruct(structData, size, uAsset, depth) + + if success { + return &StructType{ + Type: structData.Type, + Value: result, + } + } + } + } + + properties := make([]*FPropertyTag, 0) + + for { + property := parser.ReadFPropertyTag(uAsset, true, depth+1) + + if property == nil { + break + } 
+ + properties = append(properties, property) + } + + tag = properties + break + case "IntProperty": + tag = parser.ReadInt32() + break + case "Int8Property": + tag = int8(parser.Read(1)[0]) + break + case "ObjectProperty": + tag = parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) + break + case "TextProperty": + tag = parser.ReadFText() + break + case "BoolProperty": + // No extra data + break + case "NameProperty": + tag = parser.ReadFName(uAsset.Names) + break + case "StrProperty": + tag = parser.ReadString() + break + case "UInt16Property": + tag = parser.ReadUint16() + break + case "UInt32Property": + tag = parser.ReadUint32() + break + case "UInt64Property": + tag = parser.ReadUint64() + break + case "InterfaceProperty": + tag = &UInterfaceProperty{ + InterfaceNumber: parser.ReadUint32(), + } + break + case "ByteProperty": + if size == 4 || size == -4 { + tag = parser.ReadUint32() + } else if size >= 8 { + tag = parser.ReadFName(uAsset.Names) + } else { + tag = parser.Read(1)[0] + } + break + case "SoftObjectProperty": + tag = &FSoftObjectPath{ + AssetPathName: parser.ReadFName(uAsset.Names), + SubPath: parser.ReadString(), + } + break + case "EnumProperty": + if size == 8 { + tag = parser.ReadFName(uAsset.Names) + } else if size == 0 { + break + } else { + panic("unknown state!") + } + break + case "MapProperty": + keyType := tagData.(*MapProperty).KeyType + valueType := tagData.(*MapProperty).ValueType + + var keyData interface{} + var valueData interface{} + + realTagData, ok := mapPropertyTypeOverrides[strings.Trim(*name, "\x00")] + + if ok { + if strings.Trim(keyType, "\x00") != "StructProperty" { + keyType = realTagData.KeyType + } else { + keyData = &StructProperty{ + Type: realTagData.KeyType, + } + } + + if strings.Trim(valueType, "\x00") != "StructProperty" { + valueType = realTagData.ValueType + } else { + valueData = &StructProperty{ + Type: realTagData.ValueType, + } + } + } + + if strings.Trim(keyType, "\x00") == "StructProperty" && keyData == nil { + parser.Read(size) + log.Warningf("%sSkipping MapProperty [%s] %s -> %s", d(depth), strings.Trim(*name, "\x00"), strings.Trim(keyType, "\x00"), strings.Trim(valueType, "\x00")) + break + } + + log.Tracef("%sReading MapProperty [%d]: %s -> %s", d(depth), size, strings.Trim(keyType, "\x00"), strings.Trim(valueType, "\x00")) + + numKeysToRemove := parser.ReadUint32() + + if numKeysToRemove != 0 { + // TODO Read MapProperty where numKeysToRemove != 0 + parser.Read(size - 4) + log.Warningf("%sSkipping MapProperty [%s] Remove Key Count: %d", d(depth), strings.Trim(*name, "\x00"), numKeysToRemove) + break + } + + num := parser.ReadInt32() + + results := make([]*MapPropertyEntry, num) + for i := int32(0); i < num; i++ { + key := parser.ReadTag(-4, uAsset, keyType, keyData, nil, depth+1) + + if key == nil { + parser.Read(size - 8) + log.Warningf("%sSkipping MapProperty [%s]: nil key", d(depth), strings.Trim(*name, "\x00")) + break + } + + value := parser.ReadTag(-4, uAsset, valueType, valueData, nil, depth+1) + + results[i] = &MapPropertyEntry{ + Key: key, + Value: value, + } + } + + tag = results + break + default: + log.Debugf("%sUnread Tag Type: %s", d(depth), strings.Trim(propertyType, "\x00")) + parser.Read(size) + break + } + + return tag +} diff --git a/parser/parser_basic.go b/parser/parser_basic.go new file mode 100644 index 0000000..0b07e84 --- /dev/null +++ b/parser/parser_basic.go @@ -0,0 +1,48 @@ +package parser + +import ( + "encoding/binary" + "github.com/Vilsol/ue4pak/utils" + "math" +) + +func (parser 
*PakParser) ReadString() string { + stringLength := parser.ReadInt32() + + if stringLength == 0 { + return "" + } + + if stringLength < 0 { + stringLength = (stringLength * -1) * 2 + return utils.DecodeUtf16(parser.Read(stringLength)) + } + + return string(parser.Read(stringLength)) +} + +func (parser *PakParser) ReadFloat32() float32 { + value := math.Float32frombits(parser.ReadUint32()) + assertFloat32IsFinite(value) + return value +} + +func (parser *PakParser) ReadInt32() int32 { + return utils.Int32(parser.Read(4)) +} + +func (parser *PakParser) ReadInt64() int64 { + return utils.Int64(parser.Read(8)) +} + +func (parser *PakParser) ReadUint16() uint16 { + return binary.LittleEndian.Uint16(parser.Read(2)) +} + +func (parser *PakParser) ReadUint32() uint32 { + return binary.LittleEndian.Uint32(parser.Read(4)) +} + +func (parser *PakParser) ReadUint64() uint64 { + return binary.LittleEndian.Uint64(parser.Read(8)) +} diff --git a/parser/parser_common.go b/parser/parser_common.go new file mode 100644 index 0000000..dba267c --- /dev/null +++ b/parser/parser_common.go @@ -0,0 +1,127 @@ +package parser + +import ( + "math" + "strings" +) + +func (parser *PakParser) ReadFGenerationInfo() *FGenerationInfo { + return &FGenerationInfo{ + ExportCount: parser.ReadInt32(), + NameCount: parser.ReadInt32(), + } +} + +func (parser *PakParser) ReadFEngineVersion() *FEngineVersion { + return &FEngineVersion{ + Major: parser.ReadUint16(), + Minor: parser.ReadUint16(), + Patch: parser.ReadUint16(), + ChangeList: parser.ReadUint32(), + Branch: parser.ReadString(), + } +} + +func (parser *PakParser) ReadFCompressedChunk() *FCompressedChunk { + return &FCompressedChunk{ + UncompressedOffset: parser.ReadInt32(), + UncompressedSize: parser.ReadInt32(), + CompressedOffset: parser.ReadInt32(), + CompressedSize: parser.ReadInt32(), + } +} + +func (parser *PakParser) ReadFName(names []*FNameEntrySerialized) string { + index := parser.ReadUint32() + // Instance ID + parser.Read(4) + return names[index].Name +} + +func (parser *PakParser) ReadFPackageIndex(imports []*FObjectImport, exports []*FObjectExport) *FPackageIndex { + return parser.ReadFPackageIndexInt(parser.ReadInt32(), imports, exports) +} + +func (parser *PakParser) ReadFPackageIndexInt(index int32, imports []*FObjectImport, exports []*FObjectExport) *FPackageIndex { + if index == 0 { + // TODO Values of 0 indicate that this resource represents a top-level UPackage object (the linker's LinkerRoot). 
Serialized + return &FPackageIndex{ + Index: index, + Reference: nil, + } + } + + if index < 0 { + correctedIndex := index*-1 - 1 + if correctedIndex >= 0 && correctedIndex < int32(len(imports)) { + return &FPackageIndex{ + Index: index, + Reference: imports[index*-1-1], + } + } + + return &FPackageIndex{ + Index: index, + Reference: nil, + } + } + + if index-1 < int32(len(exports)) { + return &FPackageIndex{ + Index: index - 1, + Reference: exports[index-1], + } + } + + return nil +} + +func (parser *PakParser) ReadFText() *FText { + flags := parser.ReadUint32() + historyType := int8(parser.Read(1)[0]) + + if historyType != 0 { + return &FText{ + Flags: flags, + HistoryType: historyType, + } + } + + return &FText{ + Flags: flags, + HistoryType: historyType, + Namespace: parser.ReadString(), + Key: parser.ReadString(), + SourceString: parser.ReadString(), + } +} + +func (parser *PakParser) ReadFPropertyTagLoop(uAsset *FPackageFileSummary) []*FPropertyTag { + properties := make([]*FPropertyTag, 0) + + for { + property := parser.ReadFPropertyTag(uAsset, true, 0) + + if property == nil { + break + } + + properties = append(properties, property) + } + + return properties +} + +func d(n int) string { + return strings.Repeat(" ", n) +} + +func assertFloat32IsFinite(n float32) { + value := float64(n) + if math.IsNaN(value) { + panic("Expected a float32, but received NaN") + } + if math.IsInf(value, 0) { + panic("Expected a float32, but received inf") + } +} diff --git a/parser/parser_pak.go b/parser/parser_pak.go new file mode 100644 index 0000000..6a52885 --- /dev/null +++ b/parser/parser_pak.go @@ -0,0 +1,98 @@ +package parser + +import ( + "encoding/binary" + log "github.com/sirupsen/logrus" +) + +func (parser *PakParser) Parse() *PakFile { + // Find magic number + magicOffset := int64(-44) + + for { + parser.Seek(magicOffset, 2) + + magicArray := parser.Read(4) + + if magicArray[0] == 0xE1 && magicArray[1] == 0x12 && magicArray[2] == 0x6F && magicArray[3] == 0x5A { + break + } + + magicOffset -= 1 + + if magicOffset < -1024 { + log.Fatal("Could not find magic bytes in pak!") + } + } + + // Seek and read the footer of the file + parser.Seek(magicOffset, 2) + footer := parser.Read(int32(magicOffset * -1)) + + pakFooter := &FPakInfo{ + Magic: binary.LittleEndian.Uint32(footer[0:4]), + Version: binary.LittleEndian.Uint32(footer[4:8]), + IndexOffset: binary.LittleEndian.Uint64(footer[8:16]), + IndexSize: binary.LittleEndian.Uint64(footer[16:24]), + IndexSHA1Hash: footer[24:44], + } + + // Seek and read the index of the file + parser.Seek(int64(pakFooter.IndexOffset), 0) + + mountPoint := parser.ReadString() + recordCount := parser.ReadUint32() + + pakIndex := &FPakIndex{ + MountPoint: mountPoint, + Records: make([]*FPakEntry, recordCount), + } + + for i := 0; i < len(pakIndex.Records); i++ { + pakIndex.Records[i] = &FPakEntry{} + + pakIndex.Records[i].FileName = parser.ReadString() + pakIndex.Records[i].FileOffset = parser.ReadUint64() + pakIndex.Records[i].FileSize = parser.ReadUint64() + pakIndex.Records[i].UncompressedSize = parser.ReadUint64() + + if pakFooter.Version >= 8 { + pakIndex.Records[i].CompressionMethod = uint32(parser.Read(1)[0]) + } else { + pakIndex.Records[i].CompressionMethod = parser.ReadUint32() + } + + if pakFooter.Version <= 1 { + pakIndex.Records[i].Timestamp = parser.ReadUint64() + } + + pakIndex.Records[i].DataSHA1Hash = parser.Read(20) + + if pakFooter.Version >= 3 { + if pakIndex.Records[i].CompressionMethod != 0 { + blockCount := parser.ReadUint32() + + 
pakIndex.Records[i].CompressionBlocks = make([]*FPakCompressedBlock, blockCount) + + for j := 0; j < len(pakIndex.Records[i].CompressionBlocks); j++ { + pakIndex.Records[i].CompressionBlocks[j] = &FPakCompressedBlock{ + StartOffset: parser.ReadUint64(), + EndOffset: parser.ReadUint64(), + } + } + } + + pakIndex.Records[i].IsEncrypted = parser.Read(1)[0] > 0 + pakIndex.Records[i].CompressionBlockSize = parser.ReadUint32() + } + + if pakFooter.Version == 4 { + // TODO ??? + } + } + + return &PakFile{ + Footer: pakFooter, + Index: pakIndex, + } +} diff --git a/parser/parser_types.go b/parser/parser_types.go index e2ea165..09dd042 100644 --- a/parser/parser_types.go +++ b/parser/parser_types.go @@ -1,41 +1,11 @@ package parser import ( - "encoding/binary" - "fmt" - "math" "strings" - "github.com/Vilsol/ue4pak/utils" log "github.com/sirupsen/logrus" - "github.com/spf13/viper" ) -type PakParser struct { - reader PakReader - tracker *readTracker - preload []byte -} - -type readTracker struct { - child *readTracker - bytesRead int32 -} - -func (tracker *readTracker) Increment(n int32) { - tracker.bytesRead += n - - if tracker.child != nil { - tracker.child.Increment(n) - } -} - -func NewParser(reader PakReader) *PakParser { - return &PakParser{ - reader: reader, - } -} - func (parser *PakParser) ProcessPak(parseFile func(string) bool, handleEntry func(string, *PakEntrySet, *PakFile)) { pak := parser.Parse() @@ -86,10 +56,10 @@ func (parser *PakParser) ProcessPak(parseFile func(string) bool, handleEntry fun exportSet := make([]PakExportSet, len(exports)) i := 0 - for export, properties := range exports { + for export, data := range exports { exportSet[i] = PakExportSet{ - Export: export, - Properties: properties, + Export: export, + Data: data, } i++ } @@ -104,859 +74,3 @@ func (parser *PakParser) ProcessPak(parseFile func(string) bool, handleEntry fun } } } - -func (parser *PakParser) Parse() *PakFile { - // Find magic number - magicOffset := int64(-44) - - for { - parser.Seek(magicOffset, 2) - - magicArray := parser.Read(4) - - if magicArray[0] == 0xE1 && magicArray[1] == 0x12 && magicArray[2] == 0x6F && magicArray[3] == 0x5A { - break - } - - magicOffset -= 1 - - if magicOffset < -1024 { - log.Fatal("Could not find magic bytes in pak!") - } - } - - // Seek and read the footer of the file - parser.Seek(magicOffset, 2) - footer := parser.Read(int32(magicOffset * -1)) - - pakFooter := &FPakInfo{ - Magic: binary.LittleEndian.Uint32(footer[0:4]), - Version: binary.LittleEndian.Uint32(footer[4:8]), - IndexOffset: binary.LittleEndian.Uint64(footer[8:16]), - IndexSize: binary.LittleEndian.Uint64(footer[16:24]), - IndexSHA1Hash: footer[24:44], - } - - // Seek and read the index of the file - parser.Seek(int64(pakFooter.IndexOffset), 0) - - mountPoint := parser.ReadString() - recordCount := parser.ReadUint32() - - pakIndex := &FPakIndex{ - MountPoint: mountPoint, - Records: make([]*FPakEntry, recordCount), - } - - for i := 0; i < len(pakIndex.Records); i++ { - pakIndex.Records[i] = &FPakEntry{} - - pakIndex.Records[i].FileName = parser.ReadString() - pakIndex.Records[i].FileOffset = parser.ReadUint64() - pakIndex.Records[i].FileSize = parser.ReadUint64() - pakIndex.Records[i].UncompressedSize = parser.ReadUint64() - - if pakFooter.Version >= 8 { - pakIndex.Records[i].CompressionMethod = uint32(parser.Read(1)[0]) - } else { - pakIndex.Records[i].CompressionMethod = parser.ReadUint32() - } - - if pakFooter.Version <= 1 { - pakIndex.Records[i].Timestamp = parser.ReadUint64() - } - - 
pakIndex.Records[i].DataSHA1Hash = parser.Read(20) - - if pakFooter.Version >= 3 { - if pakIndex.Records[i].CompressionMethod != 0 { - blockCount := parser.ReadUint32() - - pakIndex.Records[i].CompressionBlocks = make([]*FPakCompressedBlock, blockCount) - - for j := 0; j < len(pakIndex.Records[i].CompressionBlocks); j++ { - pakIndex.Records[i].CompressionBlocks[j] = &FPakCompressedBlock{ - StartOffset: parser.ReadUint64(), - EndOffset: parser.ReadUint64(), - } - } - } - - pakIndex.Records[i].IsEncrypted = parser.Read(1)[0] > 0 - pakIndex.Records[i].CompressionBlockSize = parser.ReadUint32() - } - - if pakFooter.Version == 4 { - // TODO ??? - } - } - - return &PakFile{ - Footer: pakFooter, - Index: pakIndex, - } -} - -func (parser *PakParser) TrackRead() { - parser.tracker = &readTracker{ - child: parser.tracker, - } -} - -func (parser *PakParser) UnTrackRead() { - if parser.tracker != nil { - parser.tracker = parser.tracker.child - } -} - -func (parser *PakParser) Seek(offset int64, whence int) (ret int64, err error) { - parser.preload = nil - return parser.reader.Seek(offset, whence) -} - -func (parser *PakParser) Preload(n int32) { - if viper.GetBool("NoPreload") { - return - } - - buffer := make([]byte, n) - read, err := parser.reader.Read(buffer) - - if err != nil { - panic(err) - } - - if int32(read) < n { - panic(fmt.Sprintf("End of stream: %d < %d", read, n)) - } - - if parser.preload != nil && len(parser.preload) > 0 { - parser.preload = append(parser.preload, buffer...) - } else { - parser.preload = buffer - } -} - -func (parser *PakParser) Read(n int32) []byte { - toRead := n - buffer := make([]byte, toRead) - - if parser.preload != nil && len(parser.preload) > 0 { - copied := copy(buffer, parser.preload) - parser.preload = parser.preload[copied:] - toRead = toRead - int32(copied) - } - - if toRead > 0 { - read, err := parser.reader.Read(buffer[n-toRead:]) - - if err != nil { - panic(err) - } - - if int32(read) < toRead { - panic(fmt.Sprintf("End of stream: %d < %d", read, toRead)) - } - } - - if parser.tracker != nil { - parser.tracker.Increment(n) - } - - return buffer -} - -func (parser *PakParser) ReadString() string { - stringLength := parser.ReadInt32() - - if stringLength == 0 { - return "" - } - - if stringLength < 0 { - stringLength = (stringLength * -1) * 2 - return utils.DecodeUtf16(parser.Read(stringLength)) - } - - return string(parser.Read(stringLength)) -} - -func (parser *PakParser) ReadFloat32() float32 { - value := math.Float32frombits(parser.ReadUint32()) - assertFloat32IsFinite(value) - return value -} - -func (parser *PakParser) ReadInt32() int32 { - return utils.Int32(parser.Read(4)) -} - -func (parser *PakParser) ReadInt64() int64 { - return utils.Int64(parser.Read(8)) -} - -func (parser *PakParser) ReadUint16() uint16 { - return binary.LittleEndian.Uint16(parser.Read(2)) -} - -func (parser *PakParser) ReadUint32() uint32 { - return binary.LittleEndian.Uint32(parser.Read(4)) -} - -func (parser *PakParser) ReadUint64() uint64 { - return binary.LittleEndian.Uint64(parser.Read(8)) -} - -func (parser *PakParser) ReadFGenerationInfo() *FGenerationInfo { - return &FGenerationInfo{ - ExportCount: parser.ReadInt32(), - NameCount: parser.ReadInt32(), - } -} - -func (parser *PakParser) ReadFEngineVersion() *FEngineVersion { - return &FEngineVersion{ - Major: parser.ReadUint16(), - Minor: parser.ReadUint16(), - Patch: parser.ReadUint16(), - ChangeList: parser.ReadUint32(), - Branch: parser.ReadString(), - } -} - -func (parser *PakParser) ReadFCompressedChunk() 
*FCompressedChunk { - return &FCompressedChunk{ - UncompressedOffset: parser.ReadInt32(), - UncompressedSize: parser.ReadInt32(), - CompressedOffset: parser.ReadInt32(), - CompressedSize: parser.ReadInt32(), - } -} - -func (parser *PakParser) ReadFName(names []*FNameEntrySerialized) string { - index := parser.ReadUint32() - // Instance ID - parser.Read(4) - return names[index].Name -} - -func (parser *PakParser) ReadFPackageIndex(imports []*FObjectImport, exports []*FObjectExport) *FPackageIndex { - return parser.ReadFPackageIndexInt(parser.ReadInt32(), imports, exports) -} - -func (parser *PakParser) ReadFPackageIndexInt(index int32, imports []*FObjectImport, exports []*FObjectExport) *FPackageIndex { - if index == 0 { - // TODO Values of 0 indicate that this resource represents a top-level UPackage object (the linker's LinkerRoot). Serialized - return &FPackageIndex{ - Index: index, - Reference: nil, - } - } - - if index < 0 { - correctedIndex := index*-1 - 1 - if correctedIndex >= 0 && correctedIndex < int32(len(imports)) { - return &FPackageIndex{ - Index: index, - Reference: imports[index*-1-1], - } - } - - return &FPackageIndex{ - Index: index, - Reference: nil, - } - } - - if index-1 < int32(len(exports)) { - return &FPackageIndex{ - Index: index - 1, - Reference: exports[index-1], - } - } - - return nil -} - -func (parser *PakParser) ReadFText() *FText { - flags := parser.ReadUint32() - historyType := int8(parser.Read(1)[0]) - - if historyType != 0 { - return &FText{ - Flags: flags, - HistoryType: historyType, - } - } - - return &FText{ - Flags: flags, - HistoryType: historyType, - Namespace: parser.ReadString(), - Key: parser.ReadString(), - SourceString: parser.ReadString(), - } -} - -func (record *FPakEntry) ReadUAsset(pak *PakFile, parser *PakParser) *FPackageFileSummary { - // Skip UE4 pak header - // TODO Find out what's in the pak header - headerSize := int64(pak.Footer.HeaderSize()) - - parser.Seek(headerSize+int64(record.FileOffset), 0) - parser.Preload(int32(record.FileSize)) - - tag := parser.ReadInt32() - legacyFileVersion := parser.ReadInt32() - legacyUE3Version := parser.ReadInt32() - fileVersionUE4 := parser.ReadInt32() - fileVersionLicenseeUE4 := parser.ReadInt32() - - // TODO custom_version_container: Vec - parser.Read(4) - - totalHeaderSize := parser.ReadInt32() - folderName := parser.ReadString() - packageFlags := parser.ReadUint32() - nameCount := parser.ReadUint32() - nameOffset := parser.ReadInt32() - gatherableTextDataCount := parser.ReadInt32() - gatherableTextDataOffset := parser.ReadInt32() - exportCount := parser.ReadUint32() - exportOffset := parser.ReadInt32() - importCount := parser.ReadUint32() - importOffset := parser.ReadInt32() - dependsOffset := parser.ReadInt32() - stringAssetReferencesCount := parser.ReadInt32() - stringAssetReferencesOffset := parser.ReadInt32() - searchableNamesOffset := parser.ReadInt32() - thumbnailTableOffset := parser.ReadInt32() - guid := parser.ReadFGuid() - generationCount := parser.ReadUint32() - - generations := make([]*FGenerationInfo, generationCount) - for i := uint32(0); i < generationCount; i++ { - generations[i] = parser.ReadFGenerationInfo() - } - - savedByEngineVersion := parser.ReadFEngineVersion() - compatibleWithEngineVersion := parser.ReadFEngineVersion() - compressionFlags := parser.ReadUint32() - compressedChunkCount := parser.ReadUint32() - - compressedChunks := make([]*FCompressedChunk, compressedChunkCount) - for i := uint32(0); i < compressedChunkCount; i++ { - compressedChunks[i] = 
parser.ReadFCompressedChunk() - } - - packageSource := parser.ReadUint32() - additionalPackageCount := parser.ReadUint32() - - additionalPackagesToCook := make([]string, additionalPackageCount) - for i := uint32(0); i < additionalPackageCount; i++ { - additionalPackagesToCook[i] = parser.ReadString() - } - - assetRegistryDataOffset := parser.ReadInt32() - bulkDataStartOffset := parser.ReadInt32() - worldTileInfoDataOffset := parser.ReadInt32() - chunkCount := parser.ReadUint32() - - chunkIds := make([]int32, chunkCount) - for i := uint32(0); i < chunkCount; i++ { - chunkIds[i] = parser.ReadInt32() - } - - // TODO unknown bytes - parser.Read(4) - - preloadDependencyCount := parser.ReadInt32() - preloadDependencyOffset := parser.ReadInt32() - - names := make([]*FNameEntrySerialized, nameCount) - for i := uint32(0); i < nameCount; i++ { - names[i] = &FNameEntrySerialized{ - Name: parser.ReadString(), - NonCasePreservingHash: parser.ReadUint16(), - CasePreservingHash: parser.ReadUint16(), - } - } - - imports := make([]*FObjectImport, importCount) - for i := uint32(0); i < importCount; i++ { - imports[i] = &FObjectImport{ - ClassPackage: parser.ReadFName(names), - ClassName: parser.ReadFName(names), - OuterIndex: parser.ReadInt32(), - ObjectName: parser.ReadFName(names), - } - } - - exports := make([]*FObjectExport, exportCount) - for i := uint32(0); i < exportCount; i++ { - exports[i] = &FObjectExport{ - ClassIndex: parser.ReadFPackageIndex(imports, exports), - SuperIndex: parser.ReadFPackageIndex(imports, exports), - TemplateIndex: parser.ReadFPackageIndex(imports, exports), - OuterIndex: parser.ReadFPackageIndex(imports, exports), - ObjectName: parser.ReadFName(names), - Save: parser.ReadUint32(), - SerialSize: parser.ReadInt64(), - SerialOffset: parser.ReadInt64(), - ForcedExport: parser.ReadInt32() != 0, - NotForClient: parser.ReadInt32() != 0, - NotForServer: parser.ReadInt32() != 0, - PackageGuid: parser.ReadFGuid(), - PackageFlags: parser.ReadUint32(), - NotAlwaysLoadedForEditorGame: parser.ReadInt32() != 0, - IsAsset: parser.ReadInt32() != 0, - FirstExportDependency: parser.ReadInt32(), - SerializationBeforeSerializationDependencies: parser.ReadInt32() != 0, - CreateBeforeSerializationDependencies: parser.ReadInt32() != 0, - SerializationBeforeCreateDependencies: parser.ReadInt32() != 0, - CreateBeforeCreateDependencies: parser.ReadInt32() != 0, - } - } - - for _, objectImport := range imports { - objectImport.OuterPackage = parser.ReadFPackageIndexInt(objectImport.OuterIndex, imports, exports) - } - - // fmt.Println("UASSET LEFTOVERS:", len(fileData[offset:])) - // fmt.Println(utils.HexDump(fileData[offset:])) - - // TODO Bunch of unknown bytes at the end - - return &FPackageFileSummary{ - Tag: tag, - LegacyFileVersion: legacyFileVersion, - LegacyUE3Version: legacyUE3Version, - FileVersionUE4: fileVersionUE4, - FileVersionLicenseeUE4: fileVersionLicenseeUE4, - TotalHeaderSize: totalHeaderSize, - FolderName: folderName, - PackageFlags: packageFlags, - NameOffset: nameOffset, - GatherableTextDataCount: gatherableTextDataCount, - GatherableTextDataOffset: gatherableTextDataOffset, - ExportOffset: exportOffset, - ImportOffset: importOffset, - DependsOffset: dependsOffset, - StringAssetReferencesCount: stringAssetReferencesCount, - StringAssetReferencesOffset: stringAssetReferencesOffset, - SearchableNamesOffset: searchableNamesOffset, - ThumbnailTableOffset: thumbnailTableOffset, - GUID: guid, - Generations: generations, - SavedByEngineVersion: savedByEngineVersion, - 
CompatibleWithEngineVersion: compatibleWithEngineVersion, - CompressionFlags: compressionFlags, - CompressedChunks: compressedChunks, - PackageSource: packageSource, - AdditionalPackagesToCook: additionalPackagesToCook, - AssetRegistryDataOffset: assetRegistryDataOffset, - BulkDataStartOffset: bulkDataStartOffset, - WorldTileInfoDataOffset: worldTileInfoDataOffset, - ChunkIds: chunkIds, - PreloadDependencyCount: preloadDependencyCount, - PreloadDependencyOffset: preloadDependencyOffset, - Names: names, - Imports: imports, - Exports: exports, - } -} - -func (record *FPakEntry) ReadUExp(pak *PakFile, parser *PakParser, uAsset *FPackageFileSummary) map[*FObjectExport][]*FPropertyTag { - // Skip UE4 pak header - // TODO Find out what's in the pak header - headerSize := int64(pak.Footer.HeaderSize()) - - exports := make(map[*FObjectExport][]*FPropertyTag) - - for _, export := range uAsset.Exports { - offset := headerSize + int64(record.FileOffset) + (export.SerialOffset - int64(uAsset.TotalHeaderSize)) - log.Debugf("Reading export [%x]: %#v", offset, export.TemplateIndex.Reference) - parser.Seek(offset, 0) - - properties := make([]*FPropertyTag, 0) - - for { - property := parser.ReadFPropertyTag(uAsset, true, 0) - - if property == nil { - break - } - - properties = append(properties, property) - } - - /*if parser.preload != nil { - preloadData := parser.preload - if len(preloadData) > 0 { - fmt.Println() - // spew.Dump(export) - log.Warningf("Export leftovers: %d", len(preloadData)) - - if len(preloadData) < 1000 { - fmt.Println(utils.HexDump(preloadData)) - } - } - }*/ - - exports[export] = properties - } - - // fmt.Println("UEXP LEFTOVERS:", len(fileData[globalOffset:])) - // fmt.Println(utils.HexDump(fileData[globalOffset:])) - - return exports -} - -func (parser *PakParser) ReadFPropertyTag(uAsset *FPackageFileSummary, readData bool, depth int) *FPropertyTag { - name := parser.ReadFName(uAsset.Names) - - if strings.Trim(name, "\x00") == "None" { - return nil - } - - propertyType := parser.ReadFName(uAsset.Names) - size := parser.ReadInt32() - arrayIndex := parser.ReadInt32() - - log.Tracef("%sReading Property %s (%s)[%d]", d(depth), strings.Trim(name, "\x00"), strings.Trim(propertyType, "\x00"), size) - - var tagData interface{} - - switch strings.Trim(propertyType, "\x00") { - case "StructProperty": - tagData = &StructProperty{ - Type: parser.ReadFName(uAsset.Names), - Guid: parser.ReadFGuid(), - } - - log.Tracef("%sStructProperty Type: %s", d(depth), tagData.(*StructProperty).Type) - break - case "BoolProperty": - tagData = parser.Read(1)[0] != 0 - break - case "EnumProperty": - fallthrough - case "ByteProperty": - fallthrough - case "SetProperty": - fallthrough - case "ArrayProperty": - tagData = parser.ReadFName(uAsset.Names) - break - case "MapProperty": - tagData = &MapProperty{ - KeyType: parser.ReadFName(uAsset.Names), - ValueType: parser.ReadFName(uAsset.Names), - } - break - } - - hasGuid := parser.Read(1)[0] != 0 - - var propertyGuid *FGuid - - if hasGuid { - propertyGuid = parser.ReadFGuid() - } - - var tag interface{} - - if readData && size > 0 { - parser.Preload(size) - parser.TrackRead() - tag = parser.ReadTag(size, uAsset, propertyType, tagData, &name, depth) - - if parser.tracker.bytesRead != size { - log.Warningf("%sProperty not read correctly %s (%s)[%#v]: %d read out of %d", - d(depth), - strings.Trim(name, "\x00"), - strings.Trim(propertyType, "\x00"), - tagData, - parser.tracker.bytesRead, - size) - - if parser.tracker.bytesRead > size { - log.Fatalf("More bytes read 
than available!") - } else { - parser.Read(size - parser.tracker.bytesRead) - } - } - - parser.UnTrackRead() - } - - return &FPropertyTag{ - Name: name, - PropertyType: propertyType, - TagData: tagData, - Size: size, - ArrayIndex: arrayIndex, - PropertyGuid: propertyGuid, - Tag: tag, - } -} - -func (parser *PakParser) ReadTag(size int32, uAsset *FPackageFileSummary, propertyType string, tagData interface{}, name *string, depth int) interface{} { - var tag interface{} - switch strings.Trim(propertyType, "\x00") { - case "FloatProperty": - tag = parser.ReadFloat32() - break - case "ArrayProperty": - arrayTypes := strings.Trim(tagData.(string), "\x00") - valueCount := parser.ReadInt32() - - var innerTagData *FPropertyTag - - if arrayTypes == "StructProperty" { - innerTagData = parser.ReadFPropertyTag(uAsset, false, depth+1) - } - - values := make([]interface{}, valueCount) - for i := int32(0); i < valueCount; i++ { - switch arrayTypes { - case "SoftObjectProperty": - values[i] = &FSoftObjectPath{ - AssetPathName: parser.ReadFName(uAsset.Names), - SubPath: parser.ReadString(), - } - break - case "StructProperty": - log.Tracef("%sReading Array StructProperty: %s", d(depth), strings.Trim(innerTagData.TagData.(*StructProperty).Type, "\x00")) - values[i] = &ArrayStructProperty{ - InnerTagData: innerTagData, - Properties: parser.ReadTag(-1, uAsset, arrayTypes, innerTagData.TagData, nil, depth+1), - } - break - case "ObjectProperty": - values[i] = parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) - break - case "BoolProperty": - values[i] = parser.Read(1)[0] != 0 - break - case "ByteProperty": - if (size-4)/valueCount == 1 { - values[i] = parser.Read(1)[0] - } else { - values[i] = parser.ReadFName(uAsset.Names) - } - break - case "NameProperty": - fallthrough - case "EnumProperty": - values[i] = parser.ReadFName(uAsset.Names) - break - case "IntProperty": - values[i] = parser.ReadInt32() - break - case "FloatProperty": - values[i] = parser.ReadFloat32() - break - case "TextProperty": - values[i] = parser.ReadFText() - break - case "StrProperty": - values[i] = parser.ReadString() - break - case "DelegateProperty": - values[i] = &FScriptDelegate{ - Object: parser.ReadInt32(), - Name: parser.ReadFName(uAsset.Names), - } - break - default: - panic("unknown array type: " + arrayTypes) - } - } - - tag = values - - if valueCount > 0 && arrayTypes == "StructProperty" && values[0].(*ArrayStructProperty).Properties == nil { - if size > 0 { - // Struct data was not processed - parser.Read(innerTagData.Size) - } - } - - break - case "StructProperty": - if tagData == nil { - log.Trace("%sReading Generic StructProperty", d(depth)) - } else { - log.Tracef("%sReading StructProperty: %s", d(depth), strings.Trim(tagData.(*StructProperty).Type, "\x00")) - - if structData, ok := tagData.(*StructProperty); ok { - result, success := parser.ReadStruct(structData, size, uAsset, depth) - - if success { - return &StructType{ - Type: structData.Type, - Value: result, - } - } - } - } - - properties := make([]*FPropertyTag, 0) - - for { - property := parser.ReadFPropertyTag(uAsset, true, depth+1) - - if property == nil { - break - } - - properties = append(properties, property) - } - - tag = properties - break - case "IntProperty": - tag = parser.ReadInt32() - break - case "Int8Property": - tag = int8(parser.Read(1)[0]) - break - case "ObjectProperty": - tag = parser.ReadFPackageIndex(uAsset.Imports, uAsset.Exports) - break - case "TextProperty": - tag = parser.ReadFText() - break - case "BoolProperty": - // No extra 
data - break - case "NameProperty": - tag = parser.ReadFName(uAsset.Names) - break - case "StrProperty": - tag = parser.ReadString() - break - case "UInt32Property": - tag = parser.ReadUint32() - break - case "UInt64Property": - tag = parser.ReadUint64() - break - case "InterfaceProperty": - tag = &UInterfaceProperty{ - InterfaceNumber: parser.ReadUint32(), - } - break - case "ByteProperty": - if size == 4 || size == -4 { - tag = parser.ReadUint32() - } else if size >= 8 { - tag = parser.ReadFName(uAsset.Names) - } else { - tag = parser.Read(1)[0] - } - break - case "SoftObjectProperty": - tag = &FSoftObjectPath{ - AssetPathName: parser.ReadFName(uAsset.Names), - SubPath: parser.ReadString(), - } - break - case "EnumProperty": - if size == 8 { - tag = parser.ReadFName(uAsset.Names) - } else if size == 0 { - break - } else { - panic("unknown state!") - } - break - case "MapProperty": - keyType := tagData.(*MapProperty).KeyType - valueType := tagData.(*MapProperty).ValueType - - var keyData interface{} - var valueData interface{} - - realTagData, ok := mapPropertyTypeOverrides[strings.Trim(*name, "\x00")] - - if ok { - if strings.Trim(keyType, "\x00") != "StructProperty" { - keyType = realTagData.KeyType - } else { - keyData = &StructProperty{ - Type: realTagData.KeyType, - } - } - - if strings.Trim(valueType, "\x00") != "StructProperty" { - valueType = realTagData.ValueType - } else { - valueData = &StructProperty{ - Type: realTagData.ValueType, - } - } - } - - if strings.Trim(keyType, "\x00") == "StructProperty" && keyData == nil { - parser.Read(size) - log.Warningf("%sSkipping MapProperty [%s] %s -> %s", d(depth), strings.Trim(*name, "\x00"), strings.Trim(keyType, "\x00"), strings.Trim(valueType, "\x00")) - break - } - - log.Tracef("%sReading MapProperty [%d]: %s -> %s", d(depth), size, strings.Trim(keyType, "\x00"), strings.Trim(valueType, "\x00")) - - numKeysToRemove := parser.ReadUint32() - - if numKeysToRemove != 0 { - // TODO Read MapProperty where numKeysToRemove != 0 - parser.Read(size - 4) - log.Warningf("%sSkipping MapProperty [%s] Remove Key Count: %d", d(depth), strings.Trim(*name, "\x00"), numKeysToRemove) - break - } - - num := parser.ReadInt32() - - results := make([]*MapPropertyEntry, num) - for i := int32(0); i < num; i++ { - key := parser.ReadTag(-4, uAsset, keyType, keyData, nil, depth+1) - - if key == nil { - parser.Read(size - 8) - log.Warningf("%sSkipping MapProperty [%s]: nil key", d(depth), strings.Trim(*name, "\x00")) - break - } - - value := parser.ReadTag(-4, uAsset, valueType, valueData, nil, depth+1) - - results[i] = &MapPropertyEntry{ - Key: key, - Value: value, - } - } - - tag = results - break - default: - log.Debugf("%sUnread Tag Type: %s", d(depth), strings.Trim(propertyType, "\x00")) - parser.Read(size) - break - } - - return tag -} - -func d(n int) string { - return strings.Repeat(" ", n) -} - -func assertFloat32IsFinite(n float32) { - value := float64(n) - if math.IsNaN(value) { - panic("Expected a float32, but recieved NaN") - } - if math.IsInf(value, 0) { - panic("Expected a float32, but recieved inf") - } -} diff --git a/parser/types.go b/parser/types.go index 48cb514..3a85cec 100644 --- a/parser/types.go +++ b/parser/types.go @@ -42,8 +42,8 @@ type PakEntrySet struct { } type PakExportSet struct { - Export *FObjectExport `json:"export"` - Properties []*FPropertyTag `json:"properties"` + Export *FObjectExport `json:"export"` + Data *ExportData `json:"data"` } type FPakInfo struct { @@ -248,6 +248,11 @@ type MapPropertyEntry struct { Value interface{} 
`json:"value"` } +type ExportData struct { + Properties []*FPropertyTag `json:"properties"` + Data interface{} `json:"data"` +} + func (pakInfo *FPakInfo) HeaderSize() uint64 { if pakInfo.Version < 8 { return 53 @@ -255,3 +260,35 @@ func (pakInfo *FPakInfo) HeaderSize() uint64 { return 50 } + +func (index *FPackageIndex) ObjectName() *string { + classReference := index.Reference + + if classReference == nil { + return nil + } + + if ref, ok := classReference.(*FObjectImport); ok { + return &ref.ObjectName + } else if ref, ok := classReference.(*FObjectExport); ok { + return &ref.ObjectName + } + + return nil +} + +func (index *FPackageIndex) ClassName() *string { + classReference := index.Reference + + if classReference == nil { + return nil + } + + if ref, ok := classReference.(*FObjectImport); ok { + return &ref.ClassName + } else if ref, ok := classReference.(*FObjectExport); ok { + return ref.ClassIndex.ObjectName() + } + + return nil +} diff --git a/utils/bytewise.go b/utils/bytewise.go index 0f1c3dc..0801fd2 100644 --- a/utils/bytewise.go +++ b/utils/bytewise.go @@ -40,9 +40,12 @@ func safeChar(char byte) string { } func HexDump(data []byte) string { + return HexDumpWidth(data, 32) +} + +func HexDumpWidth(data []byte, perRow int) string { result := "" - perRow := 32 rows := int(math.Ceil(float64(len(data)) / float64(perRow))) rowWidth := perRow * 5
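
A minimal usage sketch (not part of this diff) showing how the reworked API introduced above might be consumed. Assumptions: the package import path is github.com/Vilsol/ue4pak/parser, *os.File satisfies the PakReader interface, and the pak path and the "StringTable" resolver key are purely illustrative.

package main

import (
	"os"

	"github.com/Vilsol/ue4pak/parser"
	log "github.com/sirupsen/logrus"
)

func main() {
	// Hypothetical pak file path.
	f, err := os.Open("FactoryGame-WindowsNoEditor.pak")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Optionally hook a resolver for an export class the parser does not
	// handle by default; returning nil falls back to the skip-and-warn path
	// in ReadClass. "StringTable" here is only an example key.
	parser.RegisterClassResolver("StringTable", func(p *parser.PakParser, export *parser.FObjectExport, size int32, uAsset *parser.FPackageFileSummary) interface{} {
		return nil
	})

	p := parser.NewParser(f)

	// The first callback decides which entries to parse; the second receives
	// each parsed entry set together with the pak it came from.
	p.ProcessPak(func(name string) bool {
		return true
	}, func(name string, entry *parser.PakEntrySet, pak *parser.PakFile) {
		log.Infof("parsed entry: %s", name)
	})
}

With this split, per-export results arrive as ExportData (properties plus the optional class-resolved payload), so callers that previously consumed []*FPropertyTag per export now read entry data through the Data/Properties pair instead.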