Fix lint errors

This commit is contained in:
lordwelch 2020-11-26 02:16:40 -08:00
parent 1c7c8e5055
commit 23cb8d1e85
8 changed files with 464 additions and 434 deletions

96
.golangci.yaml Normal file
View File

@ -0,0 +1,96 @@
run:
  tests: false
  build-tags:
    - noasm
output:
  # colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number"
  format: tab
  # print lines of code with issue, default is true
  print-issued-lines: true
  # print linter name in the end of issue text, default is true
  print-linter-name: true
  # make issues output unique by line, default is true
  uniq-by-line: true
  # add a prefix to the output file references; default is no prefix
  path-prefix: ""
  # sorts results by: filepath, line and column
  sort-results: true
linters-settings:
  depguard:
    list-type: blacklist
  dupl:
    threshold: 100
  funlen:
    lines: 100
    statements: 50
  gci:
    local-prefixes: github.com/golangci/golangci-lint
  goconst:
    min-len: 2
    min-occurrences: 2
  gocritic:
    enabled-tags:
      - diagnostic
      - experimental
      - opinionated
      - performance
      - style
  gocyclo:
    min-complexity: 15
  golint:
    min-confidence: 0
  gomnd:
    settings:
      mnd:
        # don't include the "operation" and "assign"
        checks: argument,case,condition,return
  govet:
    check-shadowing: true
  lll:
    line-length: 140
  maligned:
    suggest-new: true
  misspell:
    locale: US
  nolintlint:
    allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space)
    allow-unused: false # report any unused nolint directives
    require-explanation: false # don't require an explanation for nolint directives
    require-specific: false # don't require nolint directives to be specific about which linter is being skipped
linters:
  # please, do not use `enable-all`: it's deprecated and will be removed soon.
  # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
  disable-all: true
  enable:
    - deadcode
    - depguard
    - dogsled
    - dupl
    - errcheck
    - funlen
    - goconst
    - gocritic
    - gocyclo
    - golint
    - gosec
    - gosimple
    - govet
    - ineffassign
    - interfacer
    - lll
    - misspell
    - nakedret
    - noctx
    - nolintlint
    - rowserrcheck
    - scopelint
    - staticcheck
    - structcheck
    - stylecheck
    - typecheck
    - unconvert
    - unparam
    - varcheck
    - whitespace

View File

@ -135,43 +135,43 @@ func (v Vec) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
// DataType identifies the serialized type of a node attribute value.
type DataType int

// Data type identifiers used by the LS file formats. The numeric values
// are part of the on-disk format, so entries must not be reordered.
const (
	DTNone DataType = iota
	DTByte
	DTShort
	DTUShort
	DTInt
	DTUInt
	DTFloat
	DTDouble
	DTIVec2
	DTIVec3
	DTIVec4
	DTVec2
	DTVec3
	DTVec4
	DTMat2
	DTMat3
	DTMat3x4
	DTMat4x3
	DTMat4
	DTBool
	DTString
	DTPath
	DTFixedString
	DTLSString
	DTULongLong
	DTScratchBuffer
	// Seems to be unused?
	DTLong
	DTInt8
	DTTranslatedString
	DTWString
	DTLSWString
	DTUUID
	DTInt64
	DTTranslatedFSString
	// Last supported datatype, always keep this one at the end
	DTMax = iota - 1
)
func (dt *DataType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) {
@ -183,73 +183,73 @@ func (dt *DataType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) {
func (dt DataType) String() string {
switch dt {
case DT_None:
case DTNone:
return "None"
case DT_Byte:
case DTByte:
return "uint8"
case DT_Short:
case DTShort:
return "int16"
case DT_UShort:
case DTUShort:
return "uint16"
case DT_Int:
case DTInt:
return "int32"
case DT_UInt:
case DTUInt:
return "uint32"
case DT_Float:
case DTFloat:
return "float"
case DT_Double:
case DTDouble:
return "double"
case DT_IVec2:
case DTIVec2:
return "ivec2"
case DT_IVec3:
case DTIVec3:
return "ivec3"
case DT_IVec4:
case DTIVec4:
return "ivec4"
case DT_Vec2:
case DTVec2:
return "fvec2"
case DT_Vec3:
case DTVec3:
return "fvec3"
case DT_Vec4:
case DTVec4:
return "fvec4"
case DT_Mat2:
case DTMat2:
return "mat2x2"
case DT_Mat3:
case DTMat3:
return "mat3x3"
case DT_Mat3x4:
case DTMat3x4:
return "mat3x4"
case DT_Mat4x3:
case DTMat4x3:
return "mat4x3"
case DT_Mat4:
case DTMat4:
return "mat4x4"
case DT_Bool:
case DTBool:
return "bool"
case DT_String:
case DTString:
return "string"
case DT_Path:
case DTPath:
return "path"
case DT_FixedString:
case DTFixedString:
return "FixedString"
case DT_LSString:
case DTLSString:
return "LSString"
case DT_ULongLong:
case DTULongLong:
return "uint64"
case DT_ScratchBuffer:
case DTScratchBuffer:
return "ScratchBuffer"
case DT_Long:
case DTLong:
return "old_int64"
case DT_Int8:
case DTInt8:
return "int8"
case DT_TranslatedString:
case DTTranslatedString:
return "TranslatedString"
case DT_WString:
case DTWString:
return "WString"
case DT_LSWString:
case DTLSWString:
return "LSWString"
case DT_UUID:
case DTUUID:
return "guid"
case DT_Int64:
case DTInt64:
return "int64"
case DT_TranslatedFSString:
case DTTranslatedFSString:
return "TranslatedFSString"
}
return ""
@ -298,21 +298,21 @@ func (na NodeAttribute) MarshalXML(e *xml.Encoder, start xml.StartElement) error
func (na NodeAttribute) String() string {
switch na.Type {
case DT_ScratchBuffer:
case DTScratchBuffer:
// ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want
if value, ok := na.Value.([]byte); ok {
return base64.StdEncoding.EncodeToString(value)
}
return fmt.Sprint(na.Value)
case DT_Double:
case DTDouble:
v := na.Value.(float64)
if na.Value == 0 {
na.Value = 0
}
return strconv.FormatFloat(v, 'f', -1, 64)
case DT_Float:
case DTFloat:
v := na.Value.(float32)
if na.Value == 0 {
na.Value = 0
@ -330,20 +330,20 @@ func (na NodeAttribute) GetRows() (int, error) {
// GetRows returns the number of rows occupied by a vector or matrix
// data type (all vectors are a single row). It returns an error for
// every type that has no row dimension.
func (dt DataType) GetRows() (int, error) {
	switch dt {
	case DTIVec2, DTIVec3, DTIVec4, DTVec2, DTVec3, DTVec4:
		return 1, nil
	case DTMat2:
		return 2, nil
	case DTMat3, DTMat3x4:
		return 3, nil
	case DTMat4x3, DTMat4:
		return 4, nil
	default:
		// Error string is lowercase per Go convention (golint/stylecheck).
		return 0, errors.New("data type does not have rows")
	}
}
@ -353,23 +353,23 @@ func (na NodeAttribute) GetColumns() (int, error) {
// GetColumns returns the number of columns occupied by a vector or
// matrix data type. It returns an error for every type that has no
// column dimension.
func (dt DataType) GetColumns() (int, error) {
	switch dt {
	case DTIVec2, DTVec2, DTMat2:
		return 2, nil
	case DTIVec3, DTVec3, DTMat3, DTMat4x3:
		return 3, nil
	case DTIVec4, DTVec4, DTMat3x4, DTMat4:
		return 4, nil
	default:
		// Error string is lowercase per Go convention (golint/stylecheck).
		return 0, errors.New("data type does not have columns")
	}
}
func (na NodeAttribute) IsNumeric() bool {
switch na.Type {
case DT_Byte, DT_Short, DT_Int, DT_UInt, DT_Float, DT_Double, DT_ULongLong, DT_Long, DT_Int8:
case DTByte, DTShort, DTInt, DTUInt, DTFloat, DTDouble, DTULongLong, DTLong, DTInt8:
return true
default:
return false
@ -390,58 +390,62 @@ func (na *NodeAttribute) FromString(str string) error {
)
switch na.Type {
case DT_None:
case DTNone:
// This is a null type, cannot have a value
case DT_Byte:
case DTByte:
na.Value = []byte(str)
case DT_Short:
case DTShort:
na.Value, err = strconv.ParseInt(str, 0, 16)
if err != nil {
return err
}
case DT_UShort:
case DTUShort:
na.Value, err = strconv.ParseUint(str, 0, 16)
if err != nil {
return err
}
case DT_Int:
case DTInt:
na.Value, err = strconv.ParseInt(str, 0, 32)
if err != nil {
return err
}
case DT_UInt:
case DTUInt:
na.Value, err = strconv.ParseUint(str, 0, 16)
if err != nil {
return err
}
case DT_Float:
case DTFloat:
na.Value, err = strconv.ParseFloat(str, 32)
if err != nil {
return err
}
case DT_Double:
case DTDouble:
na.Value, err = strconv.ParseFloat(str, 64)
if err != nil {
return err
}
case DT_IVec2, DT_IVec3, DT_IVec4:
case DTIVec2, DTIVec3, DTIVec4:
var (
nums []string
length int
)
nums := strings.Split(str, ".")
length, err := na.GetColumns()
nums = strings.Split(str, ".")
length, err = na.GetColumns()
if err != nil {
return err
}
if length != len(nums) {
return fmt.Errorf("A vector of length %d was expected, got %d", length, len(nums))
return fmt.Errorf("a vector of length %d was expected, got %d", length, len(nums))
}
vec := make([]int, length)
@ -456,14 +460,18 @@ func (na *NodeAttribute) FromString(str string) error {
na.Value = vec
case DT_Vec2, DT_Vec3, DT_Vec4:
nums := strings.Split(str, ".")
length, err := na.GetColumns()
case DTVec2, DTVec3, DTVec4:
var (
nums []string
length int
)
nums = strings.Split(str, ".")
length, err = na.GetColumns()
if err != nil {
return err
}
if length != len(nums) {
return fmt.Errorf("A vector of length %d was expected, got %d", length, len(nums))
return fmt.Errorf("a vector of length %d was expected, got %d", length, len(nums))
}
vec := make([]float64, length)
@ -476,7 +484,7 @@ func (na *NodeAttribute) FromString(str string) error {
na.Value = vec
case DT_Mat2, DT_Mat3, DT_Mat3x4, DT_Mat4x3, DT_Mat4:
case DTMat2, DTMat3, DTMat3x4, DTMat4x3, DTMat4:
// var mat = Matrix.Parse(str);
// if (mat.cols != na.GetColumns() || mat.rows != na.GetRows()){
// return errors.New("Invalid column/row count for matrix");
@ -484,16 +492,16 @@ func (na *NodeAttribute) FromString(str string) error {
// value = mat;
return errors.New("not implemented")
case DT_Bool:
case DTBool:
na.Value, err = strconv.ParseBool(str)
if err != nil {
return err
}
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString:
case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
na.Value = str
case DT_TranslatedString:
case DTTranslatedString:
// // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part
// // That can be changed separately via attribute.Value.Handle
// if (value == null)
@ -501,7 +509,7 @@ func (na *NodeAttribute) FromString(str string) error {
// ((TranslatedString)value).Value = str;
case DT_TranslatedFSString:
case DTTranslatedFSString:
// // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part
// // That can be changed separately via attribute.Value.Handle
// if (value == null)
@ -509,28 +517,31 @@ func (na *NodeAttribute) FromString(str string) error {
// ((TranslatedFSString)value).Value = str;
case DT_ULongLong:
case DTULongLong:
na.Value, err = strconv.ParseUint(str, 10, 64)
if err != nil {
return err
}
case DT_ScratchBuffer:
case DTScratchBuffer:
na.Value, err = base64.StdEncoding.DecodeString(str)
if err != nil {
return err
}
case DT_Long, DT_Int64:
case DTLong, DTInt64:
na.Value, err = strconv.ParseInt(str, 10, 64)
if err != nil {
return err
}
case DT_Int8:
case DTInt8:
na.Value, err = strconv.ParseInt(str, 10, 8)
if err != nil {
return err
}
case DT_UUID:
case DTUUID:
na.Value, err = uuid.Parse(str)
if err != nil {
return err
@ -538,7 +549,7 @@ func (na *NodeAttribute) FromString(str string) error {
default:
// This should not happen!
return fmt.Errorf("FromString() not implemented for type %v", na.Type)
return fmt.Errorf("not implemented for type %v", na.Type)
}
return nil
}

View File

@ -8,7 +8,6 @@ import (
"fmt"
"io"
"io/ioutil"
"math"
"github.com/go-kit/kit/log"
"github.com/google/uuid"
@ -63,9 +62,9 @@ func CompressionFlagsToLevel(flags byte) CompressionLevel {
return MaxCompression
default:
panic(errors.New("Invalid compression flags"))
panic(errors.New("invalid compression flags"))
}
return 0
// return 0
}
func MakeCompressionFlags(method CompressionMethod, level CompressionLevel) int {
@ -108,22 +107,21 @@ func Decompress(compressed io.Reader, uncompressedSize int, compressionFlags byt
panic(err)
}
return bytes.NewReader(p)
} else {
// logger.Println("lz4 block compressed")
// panic(errors.New("not implemented"))
src, _ := ioutil.ReadAll(compressed)
// logger.Println(len(src))
dst := make([]byte, uncompressedSize*2)
_, err := lz4.UncompressBlock(src, dst)
if err != nil {
panic(err)
}
return bytes.NewReader(dst)
}
// logger.Println("lz4 block compressed")
// panic(errors.New("not implemented"))
src, _ := ioutil.ReadAll(compressed)
// logger.Println(len(src))
dst := make([]byte, uncompressedSize*2)
_, err := lz4.UncompressBlock(src, dst)
if err != nil {
panic(err)
}
return bytes.NewReader(dst)
default:
panic(fmt.Errorf("No decompressor found for this format: %v", compressionFlags))
panic(fmt.Errorf("no decompressor found for this format: %v", compressionFlags))
}
}
@ -142,26 +140,6 @@ func ReadCString(r io.Reader, length int) (string, error) {
return string(buf[:clen(buf)]), nil
}
func roundFloat(x float64, prec int) float64 {
var rounder float64
pow := math.Pow(10, float64(prec))
intermed := x * pow
_, frac := math.Modf(intermed)
intermed += .5
x = .5
if frac < 0.0 {
x = -.5
intermed -= 1
}
if frac >= x {
rounder = math.Ceil(intermed)
} else {
rounder = math.Floor(intermed)
}
return rounder / pow
}
func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log.Logger) (NodeAttribute, error) {
var (
attr = NodeAttribute{
@ -176,91 +154,83 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
pos, err = r.Seek(0, io.SeekCurrent)
switch DT {
case DT_None:
case DTNone:
l.Log("member", name, "read", length, "start position", pos, "value", nil)
pos += int64(length)
return attr, nil
case DT_Byte:
case DTByte:
p := make([]byte, 1)
n, err = r.Read(p)
attr.Value = p[0]
l.Log("member", name, "read", n, "start position", pos, "value", attr.Value)
pos += int64(n)
return attr, err
case DT_Short:
case DTShort:
var v int16
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_UShort:
case DTUShort:
var v uint16
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_Int:
case DTInt:
var v int32
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_UInt:
case DTUInt:
var v uint32
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_Float:
case DTFloat:
var v float32
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_Double:
case DTDouble:
var v float64
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_IVec2, DT_IVec3, DT_IVec4:
case DTIVec2, DTIVec3, DTIVec4:
var col int
col, err = attr.GetColumns()
if err != nil {
return attr, err
}
vec := make(Ivec, col)
for i, _ := range vec {
for i := range vec {
var v int32
err = binary.Read(r, binary.LittleEndian, &v)
if err != nil {
@ -271,18 +241,17 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = vec
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil
case DT_Vec2, DT_Vec3, DT_Vec4:
case DTVec2, DTVec3, DTVec4:
var col int
col, err = attr.GetColumns()
if err != nil {
return attr, err
}
vec := make(Vec, col)
for i, _ := range vec {
for i := range vec {
var v float32
err = binary.Read(r, binary.LittleEndian, &v)
if err != nil {
@ -293,11 +262,10 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = vec
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil
case DT_Mat2, DT_Mat3, DT_Mat3x4, DT_Mat4x3, DT_Mat4:
case DTMat2, DTMat3, DTMat3x4, DTMat4x3, DTMat4:
var (
row int
col int
@ -325,54 +293,52 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = (*Mat)(mat.NewDense(row, col, []float64(vec)))
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil
case DT_Bool:
case DTBool:
var v bool
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_ULongLong:
case DTULongLong:
var v uint64
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_Long, DT_Int64:
case DTLong, DTInt64:
var v int64
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_Int8:
case DTInt8:
var v int8
err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err
case DT_UUID:
case DTUUID:
var v uuid.UUID
p := make([]byte, 16)
n, err = r.Read(p)
if err != nil {
return attr, err
}
reverse(p[:4])
reverse(p[4:6])
reverse(p[6:8])
@ -380,7 +346,6 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = v
l.Log("member", name, "read", n, "start position", pos, "value", attr.Value)
pos += int64(n)
return attr, err
@ -388,18 +353,18 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
// Strings are serialized differently for each file format and should be
// handled by the format-specific ReadAttribute()
// pretty.Log(attr)
return attr, fmt.Errorf("ReadAttribute() not implemented for type %v", DT)
return attr, fmt.Errorf("readAttribute() not implemented for type %v", DT)
}
return attr, nil
// return attr, nil
}
// LimitReader returns a Reader that reads from r
// LimitReadSeeker returns a Reader that reads from r
// but stops with EOF after n bytes.
// The underlying implementation is a *LimitedReader.
func LimitReadSeeker(r io.ReadSeeker, n int64) io.ReadSeeker { return &LimitedReadSeeker{r, n} }
// A LimitedReader reads from R but limits the amount of
// A LimitedReadSeeker reads from R but limits the amount of
// data returned to just N bytes. Each call to Read
// updates N to reflect the new amount remaining.
// Read returns EOF when N <= 0 or when the underlying R returns EOF.

View File

@ -41,14 +41,17 @@ func main() {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
if !fi.IsDir() {
switch {
case !fi.IsDir():
err = openLSF(v)
if err != nil && !errors.As(err, &lslib.HeaderError{}) {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
} else if *recurse {
filepath.Walk(v, func(path string, info os.FileInfo, err error) error {
case *recurse:
_ = filepath.Walk(v, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil
}
@ -64,7 +67,8 @@ func main() {
}
return nil
})
} else {
default:
fmt.Fprintf(os.Stderr, "lsconvert: %s: Is a directory\n", v)
os.Exit(1)
}
@ -75,11 +79,14 @@ func openLSF(filename string) error {
l *lslib.Resource
err error
n string
f strwr
f interface {
io.Writer
io.StringWriter
}
)
l, err = readLSF(filename)
if err != nil {
return fmt.Errorf("Reading LSF file %s failed: %w\n", filename, err)
return fmt.Errorf("reading LSF file %s failed: %w", filename, err)
}
if *printResource {
pretty.Log(l)
@ -87,13 +94,13 @@ func openLSF(filename string) error {
if *printXML || *write {
n, err = marshalXML(l)
if err != nil {
return fmt.Errorf("Creating XML from LSF file %s failed: %w\n", filename, err)
return fmt.Errorf("creating XML from LSF file %s failed: %w", filename, err)
}
if *write {
f, err = os.OpenFile(filename, os.O_TRUNC|os.O_RDWR, 0o666)
if err != nil {
return fmt.Errorf("Writing XML from LSF file %s failed: %w\n", filename, err)
return fmt.Errorf("writing XML from LSF file %s failed: %w", filename, err)
}
} else if *printXML {
f = os.Stdout
@ -102,7 +109,7 @@ func openLSF(filename string) error {
err = writeXML(f, n)
fmt.Fprint(f, "\n")
if err != nil {
return fmt.Errorf("Writing XML from LSF file %s failed: %w\n", filename, err)
return fmt.Errorf("writing XML from LSF file %s failed: %w", filename, err)
}
}
return nil
@ -115,10 +122,10 @@ func readLSF(filename string) (*lslib.Resource, error) {
err error
)
f, err = os.Open(filename)
defer f.Close()
if err != nil {
return nil, err
}
defer f.Close()
l, err = lslib.ReadLSF(f)
if err != nil {
@ -149,12 +156,7 @@ func marshalXML(l *lslib.Resource) (string, error) {
return n, nil
}
type strwr interface {
io.Writer
io.StringWriter
}
func writeXML(f strwr, n string) error {
func writeXML(f io.StringWriter, n string) error {
var (
err error
)

View File

@ -5,29 +5,19 @@ import "errors"
// FileVersion is the on-disk format version number of an LSF file.
type FileVersion uint32

const (
	// VerInitial is the initial version of the LSF format.
	VerInitial FileVersion = iota + 1
	// VerChunkedCompress is the LSF version that added chunked compression for substreams.
	VerChunkedCompress
	// VerExtendedNodes is the LSF version that extended the node descriptors.
	VerExtendedNodes
	// VerBG3 is the BG3 version; no changes found so far apart from version numbering.
	VerBG3
	// MaxVersion is the latest version supported by this library.
	MaxVersion = iota
)

64
lsb.go
View File

@ -21,13 +21,13 @@ type LSBHeader struct {
}
// LSBRegion is a region entry in an LSB file: the region's name and the
// absolute file offset of its root node.
type LSBRegion struct {
	name   string
	offset uint32
}
type IdentifierDictionary map[int]string
func ReadLSBDictionary(r io.Reader, endianness binary.ByteOrder) (IdentifierDictionary, error) {
func ReadLSBDictionary(r io.ReadSeeker, endianness binary.ByteOrder) (IdentifierDictionary, error) {
var (
dict IdentifierDictionary
size uint32
@ -56,11 +56,12 @@ func ReadLSBDictionary(r io.Reader, endianness binary.ByteOrder) (IdentifierDict
if err != nil {
return dict, err
}
dict[key] = str
dict[int(key)] = str
}
return dict, nil
}
func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteOrder) (Resource, error) {
func ReadLSBRegions(r io.ReadSeeker, d IdentifierDictionary, endianness binary.ByteOrder) (Resource, error) {
var (
nodes []struct {
node *Node
@ -72,9 +73,12 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
err = binary.Read(r, endianness, &nodeCount)
if err != nil {
return dict, err
return Resource{}, err
}
nodes = make([]struct{ Node, offset uint32 }, nodeCount)
nodes = make([]struct {
node *Node
offset uint32
}, nodeCount)
for _, n := range nodes {
var (
key uint32
@ -82,7 +86,7 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
)
err = binary.Read(r, endianness, &key)
if err != nil {
return dict, err
return Resource{}, err
}
n.node = new(Node)
if n.node.Name, ok = d[int(key)]; !ok {
@ -90,21 +94,21 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
}
err = binary.Read(r, endianness, &n.offset)
if err != nil {
return dict, err
return Resource{}, err
}
}
// TODO: Sort
for _, n := range nodes {
var (
key uint32
attrCount uint32
childCount uint32
key uint32
attrCount uint32
// childCount uint32
)
// TODO: Check offset
err = binary.Read(r, endianness, &key)
if err != nil {
return dict, err
return Resource{}, err
}
// if keyV, ok := d[int(key)]; !ok {
// return Resource{}, ErrKeyDoesNotMatch
@ -112,36 +116,39 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
err = binary.Read(r, endianness, &attrCount)
if err != nil {
return dict, err
return Resource{}, err
}
n.node.Attributes = make([]NodeAttribute, int(attrCount))
err = binary.Read(r, endianness, &nodeCount)
if err != nil {
return dict, err
return Resource{}, err
}
}
return Resource{}, nil
}
func readLSBAttribute(r io.Reader) (NodeAttribute, err) {
func readLSBAttribute(r io.ReadSeeker, d IdentifierDictionary, endianness binary.ByteOrder) (NodeAttribute, error) {
var (
key uint32
name string
attrType uint32
attr NodeAttribute
err error
ok bool
)
err = binary.Read(r, endianness, &key)
if err != nil {
return dict, err
return attr, err
}
if name, ok = d[int(key)]; !ok {
return Resource{}, ErrInvalidNameKey
return attr, ErrInvalidNameKey
}
err = binary.Read(r, endianness, &attrType)
if err != nil {
return dict, err
return attr, err
}
ReadLSBAttribute(r, name, DataType(attrType))
return ReadLSBAttr(r, name, DataType(attrType))
}
func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, error) {
@ -153,7 +160,8 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
Type: DT,
Name: name,
}
err error
err error
length uint32
l log.Logger
pos int64
@ -162,7 +170,7 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
pos, err = r.Seek(0, io.SeekCurrent)
switch DT {
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString:
case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
var v string
v, err = ReadCString(r, int(length))
attr.Value = v
@ -172,9 +180,9 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err
case DT_TranslatedString:
case DTTranslatedString:
var v TranslatedString
v, err = ReadTranslatedString(r, Version, EngineVersion)
// v, err = ReadTranslatedString(r, Version, EngineVersion)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -182,9 +190,9 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err
case DT_TranslatedFSString:
case DTTranslatedFSString:
var v TranslatedFSString
v, err = ReadTranslatedFSString(r, Version)
// v, err = ReadTranslatedFSString(r, Version)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -192,7 +200,7 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err
case DT_ScratchBuffer:
case DTScratchBuffer:
v := make([]byte, length)
_, err = r.Read(v)
@ -204,6 +212,6 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err
default:
return ReadAttribute(r, name, DT, length, l)
return ReadAttribute(r, name, DT, uint(length), l)
}
}

332
lsf.go
View File

@ -1,7 +1,6 @@
package lslib
import (
"bufio"
"encoding/binary"
"fmt"
"io"
@ -52,64 +51,48 @@ func (f filter) Log(keyvals ...interface{}) error {
}
type LSFHeader struct {
/// summary
/// LSOF file signature
/// /summary
// LSOF file signature
Signature [4]byte
/// summary
/// Version of the LSOF file D:OS EE is version 1/2, D:OS 2 is version 3
/// /summary
// Version of the LSOF file D:OS EE is version 1/2, D:OS 2 is version 3
Version FileVersion
/// summary
/// Possibly version number? (major, minor, rev, build)
/// /summary
// Possibly version number? (major, minor, rev, build)
EngineVersion uint32
/// summary
/// Total uncompressed size of the string hash table
/// /summary
// Total uncompressed size of the string hash table
StringsUncompressedSize uint32
/// summary
/// Compressed size of the string hash table
/// /summary
// Compressed size of the string hash table
StringsSizeOnDisk uint32
/// summary
/// Total uncompressed size of the node list
/// /summary
// Total uncompressed size of the node list
NodesUncompressedSize uint32
/// summary
/// Compressed size of the node list
/// /summary
// Compressed size of the node list
NodesSizeOnDisk uint32
/// summary
/// Total uncompressed size of the attribute list
/// /summary
// Total uncompressed size of the attribute list
AttributesUncompressedSize uint32
/// summary
/// Compressed size of the attribute list
/// /summary
// Compressed size of the attribute list
AttributesSizeOnDisk uint32
/// summary
/// Total uncompressed size of the raw value buffer
/// /summary
// Total uncompressed size of the raw value buffer
ValuesUncompressedSize uint32
/// summary
/// Compressed size of the raw value buffer
/// /summary
// Compressed size of the raw value buffer
ValuesSizeOnDisk uint32
/// summary
/// Compression method and level used for the string, node, attribute and value buffers.
/// Uses the same format as packages (see BinUtils.MakeCompressionFlags)
/// /summary
// summary
// Uses the same format as packages (see BinUtils.MakeCompressionFlags)
CompressionFlags byte
/// summary
/// Possibly unused, always 0
/// /summary
// Possibly unused, always 0
Unknown2 byte
Unknown3 uint16
/// summary
/// Extended node/attribute format indicator, 0 for V2, 0/1 for V3
/// /summary
// Extended node/attribute format indicator, 0 for V2, 0/1 for V3
Extended uint32
}
@ -256,28 +239,24 @@ func (lsfh LSFHeader) IsCompressed() bool {
type NodeEntry struct {
Long bool
/// summary
/// Name of this node
/// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain)
/// /summary
// summary
// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain)
NameHashTableIndex uint32
/// summary
/// Index of the first attribute of this node
/// (-1: node has no attributes)
/// /summary
// summary
// (-1: node has no attributes)
FirstAttributeIndex int32
/// summary
/// Index of the parent node
/// (-1: this node is a root region)
/// /summary
// summary
// (-1: this node is a root region)
ParentIndex int32
/// summary
/// Index of the next sibling of this node
/// (-1: this is the last node)
/// /summary
// summary
// (-1: this is the last node)
NextSiblingIndex int32
}
@ -378,65 +357,49 @@ func (ne NodeEntry) NameOffset() int {
return int(ne.NameHashTableIndex & 0xffff)
}
/// summary
/// Processed node information for a node in the LSF file
/// /summary
// Processed node information for a node in the LSF file
type NodeInfo struct {
/// summary
/// Index of the parent node
/// (-1: this node is a root region)
/// /summary
// summary
// (-1: this node is a root region)
ParentIndex int
/// summary
/// Index into name hash table
/// /summary
// Index into name hash table
NameIndex int
/// summary
/// Offset in hash chain
/// /summary
// Offset in hash chain
NameOffset int
/// summary
/// Index of the first attribute of this node
/// (-1: node has no attributes)
/// /summary
// summary
// (-1: node has no attributes)
FirstAttributeIndex int
}
// AttributeEntry is an attribute extension in the LSF file.
type AttributeEntry struct {
	Long bool
	// NameHashTableIndex is the name of this attribute
	// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain).
	NameHashTableIndex uint32
	// TypeAndLength packs two values:
	// 6-bit LSB: type of this attribute (see NodeAttribute.DataType),
	// 26-bit MSB: length of this attribute.
	TypeAndLength uint32
	// NodeIndex is the index of the node that this attribute belongs to.
	// Note: these indexes are assigned seemingly arbitrarily, and are not
	// necessarily indices into the node list.
	NodeIndex int32
	// NextAttributeIndex chains attributes together.
	// NOTE(review): the original comment duplicated NodeIndex's description
	// ("index of the node that this attribute belongs to"); the field name
	// suggests it is the next attribute in the chain — verify against readers.
	NextAttributeIndex int32
	// Offset is the absolute position of the attribute value in the value stream.
	Offset uint32
}
@ -529,30 +492,22 @@ func (ae *AttributeEntry) readLong(r io.ReadSeeker) error {
return nil
}
// NameIndex returns the index into the name hash table
// (the upper 16 bits of NameHashTableIndex).
func (ae AttributeEntry) NameIndex() int {
	hi := ae.NameHashTableIndex >> 16
	return int(hi)
}
// NameOffset returns the offset in the hash chain
// (the lower 16 bits of NameHashTableIndex).
func (ae AttributeEntry) NameOffset() int {
	low := ae.NameHashTableIndex & 0xffff
	return int(low)
}
// TypeID returns the type of this attribute (see NodeAttribute.DataType).
func (ae AttributeEntry) TypeID() DataType {
	const typeMask = 0x3f // the low 6 bits of TypeAndLength hold the type
	return DataType(ae.TypeAndLength & typeMask)
}
// Len returns the length of this attribute.
func (ae AttributeEntry) Len() int {
	const lengthShift = 6 // the high 26 bits of TypeAndLength hold the length
	return int(ae.TypeAndLength >> lengthShift)
}
@ -560,35 +515,24 @@ func (ae AttributeEntry) Len() int {
// AttributeInfo is the resolved form of an attribute header.
type AttributeInfo struct {
	V2 bool
	// Index into name hash table.
	NameIndex int
	// Offset in hash chain.
	NameOffset int
	// Type of this attribute (see NodeAttribute.DataType).
	TypeId DataType
	// Length of this attribute.
	Length uint
	// Absolute position of attribute data in the values section.
	DataOffset uint
	// Index of the next attribute in this node
	// (-1: this is the last attribute).
	NextAttributeIndex int
}
type LSFReader struct {
data *bufio.Reader
// Offset in hash chain
NameOffset int
// Type of this attribute (see NodeAttribute.DataType)
TypeID DataType
// Length of this attribute
Length uint
// Absolute position of attribute data in the values section
DataOffset uint
// summary
// (-1: this is the last attribute)
NextAttributeIndex int
}
// extract to lsf package
@ -614,7 +558,7 @@ func ReadNames(r io.ReadSeeker) ([][]string, error) {
pos += int64(n)
names = make([][]string, int(numHashEntries))
for i, _ := range names {
for i := range names {
var numStrings uint16
@ -624,7 +568,7 @@ func ReadNames(r io.ReadSeeker) ([][]string, error) {
pos += int64(n)
var hash = make([]string, int(numStrings))
for x, _ := range hash {
for x := range hash {
var (
nameLen uint16
name []byte
@ -686,10 +630,8 @@ func readNodeInfo(r io.ReadSeeker, longNodes bool) ([]NodeInfo, error) {
return nodes[:len(nodes)-1], err
}
/// <summary>
/// Reads the attribute headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the attribute headers from</param>
// Reads the attribute headers for the LSOF resource
// <param name="s">Stream to read the attribute headers from</param>
func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
// var rawAttributes = new List<AttributeEntryV2>();
@ -717,7 +659,7 @@ func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
resolved := AttributeInfo{
NameIndex: attribute.NameIndex(),
NameOffset: attribute.NameOffset(),
TypeId: attribute.TypeID(),
TypeID: attribute.TypeID(),
Length: uint(attribute.Len()),
DataOffset: dataOffset,
NextAttributeIndex: nextAttrIndex,
@ -730,7 +672,7 @@ func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
attributes[indexOfLastAttr].NextAttributeIndex = index
}
// set the previous attribute of this node to the current attribute, we are done with it and at the end of the loop
dataOffset += uint(resolved.Length)
dataOffset += resolved.Length
prevAttributeRefs[int(attribute.NodeIndex)] = index
}
@ -775,21 +717,17 @@ func (he HeaderError) Error() string {
func ReadLSF(r io.ReadSeeker) (Resource, error) {
var (
err error
/// summary
/// Static string hash map
/// /summary
// Static string hash map
names [][]string
/// summary
/// Preprocessed list of nodes (structures)
/// /summary
// Preprocessed list of nodes (structures)
nodeInfo []NodeInfo
/// summary
/// Preprocessed list of node attributes
/// /summary
// Preprocessed list of node attributes
attributeInfo []AttributeInfo
/// summary
/// Node instances
/// /summary
// Node instances
nodeInstances []*Node
)
var (
@ -799,7 +737,7 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
)
l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "file")
pos, err = r.Seek(0, io.SeekCurrent)
l.Log("member", "LSF header", "start position", pos)
l.Log("member", "header", "start position", pos)
hdr := &LSFHeader{}
err = hdr.Read(r)
@ -922,10 +860,10 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
}
}
res.Metadata.MajorVersion = (hdr.EngineVersion & 0xf0000000) >> 28
res.Metadata.MinorVersion = (hdr.EngineVersion & 0xf000000) >> 24
res.Metadata.Major = (hdr.EngineVersion & 0xf0000000) >> 28
res.Metadata.Minor = (hdr.EngineVersion & 0xf000000) >> 24
res.Metadata.Revision = (hdr.EngineVersion & 0xff0000) >> 16
res.Metadata.BuildNumber = (hdr.EngineVersion & 0xffff)
res.Metadata.Build = (hdr.EngineVersion & 0xffff)
// pretty.Log(res)
return res, nil
@ -934,11 +872,11 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
var valueStart int64
func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attributeInfo []AttributeInfo, Version FileVersion, EngineVersion uint32) ([]*Node, error) {
func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attributeInfo []AttributeInfo, version FileVersion, engineVersion uint32) ([]*Node, error) {
NodeInstances := make([]*Node, 0, len(nodeInfo))
for _, nodeInfo := range nodeInfo {
if nodeInfo.ParentIndex == -1 {
region, err := ReadNode(r, nodeInfo, names, attributeInfo, Version, EngineVersion)
region, err := ReadNode(r, nodeInfo, names, attributeInfo, version, engineVersion)
// pretty.Log(err, region)
@ -949,7 +887,7 @@ func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attribu
return NodeInstances, err
}
} else {
node, err := ReadNode(r, nodeInfo, names, attributeInfo, Version, EngineVersion)
node, err := ReadNode(r, nodeInfo, names, attributeInfo, version, engineVersion)
// pretty.Log(err, node)
@ -965,7 +903,7 @@ func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attribu
return NodeInstances, nil
}
func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []AttributeInfo, Version FileVersion, EngineVersion uint32) (Node, error) {
func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []AttributeInfo, version FileVersion, engineVersion uint32) (Node, error) {
var (
node = Node{}
index = ni.FirstAttributeIndex
@ -993,7 +931,7 @@ func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []At
panic("shit")
}
}
v, err = ReadLSFAttribute(r, names[attribute.NameIndex][attribute.NameOffset], attribute.TypeId, attribute.Length, Version, EngineVersion)
v, err = ReadLSFAttribute(r, names[attribute.NameIndex][attribute.NameOffset], attribute.TypeID, attribute.Length, version, engineVersion)
node.Attributes = append(node.Attributes, v)
if err != nil {
return node, err
@ -1005,13 +943,13 @@ func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []At
return node, nil
}
func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Version FileVersion, EngineVersion uint32) (NodeAttribute, error) {
func ReadLSFAttribute(r io.ReadSeeker, name string, dt DataType, length uint, version FileVersion, engineVersion uint32) (NodeAttribute, error) {
// LSF and LSB serialize the buffer types differently, so specialized
// code is added to the LSB and LSf serializers, and the common code is
// available in BinUtils.ReadAttribute()
var (
attr = NodeAttribute{
Type: DT,
Type: dt,
Name: name,
}
err error
@ -1022,8 +960,8 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "attribute")
pos, err = r.Seek(0, io.SeekCurrent)
switch DT {
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString:
switch dt {
case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
var v string
v, err = ReadCString(r, int(length))
attr.Value = v
@ -1033,9 +971,9 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err
case DT_TranslatedString:
case DTTranslatedString:
var v TranslatedString
v, err = ReadTranslatedString(r, Version, EngineVersion)
v, err = ReadTranslatedString(r, version, engineVersion)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -1043,9 +981,9 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err
case DT_TranslatedFSString:
case DTTranslatedFSString:
var v TranslatedFSString
v, err = ReadTranslatedFSString(r, Version)
v, err = ReadTranslatedFSString(r, version)
attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -1053,7 +991,7 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err
case DT_ScratchBuffer:
case DTScratchBuffer:
v := make([]byte, length)
_, err = r.Read(v)
@ -1065,21 +1003,37 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err
default:
return ReadAttribute(r, name, DT, length, l)
return ReadAttribute(r, name, dt, length, l)
}
}
func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion uint32) (TranslatedString, error) {
func ReadTranslatedString(r io.ReadSeeker, version FileVersion, engineVersion uint32) (TranslatedString, error) {
var (
str TranslatedString
err error
)
if Version >= VerBG3 || EngineVersion == 0x4000001d {
if version >= VerBG3 || engineVersion == 0x4000001d {
// logger.Println("decoding bg3 data")
var version uint16
/*err =*/ binary.Read(r, binary.LittleEndian, &version)
err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
str.Version = version
err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
if version == 0 {
str.Value, err = ReadCString(r, int(str.Version))
if err != nil {
return str, err
}
str.Version = 0
} else {
_, err = r.Seek(-2, io.SeekCurrent)
}
} else {
str.Version = 0
@ -1098,7 +1052,6 @@ func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion ui
if err != nil {
return str, err
}
// logger.Printf("value length: %d value: %s read length: %d len of v: %d", vlength, v, n, len(v))
str.Value = string(v)
}
@ -1115,15 +1068,18 @@ func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion ui
return str, nil
}
func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSString, error) {
func ReadTranslatedFSString(r io.ReadSeeker, version FileVersion) (TranslatedFSString, error) {
var (
str = TranslatedFSString{}
err error
)
if Version >= VerBG3 {
if version >= VerBG3 {
var version uint16
/*err =*/ binary.Read(r, binary.LittleEndian, &version)
err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
str.Version = version
} else {
str.Version = 0
@ -1132,8 +1088,10 @@ func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSS
length int32
)
/*err =*/
binary.Read(r, binary.LittleEndian, &length)
err = binary.Read(r, binary.LittleEndian, &length)
if err != nil {
return str, err
}
str.Value, err = ReadCString(r, int(length))
if err != nil {
return str, err
@ -1169,7 +1127,7 @@ func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSS
return str, err
}
arg.String, err = ReadTranslatedFSString(r, Version)
arg.String, err = ReadTranslatedFSString(r, version)
if err != nil {
return str, err
}

View File

@ -8,11 +8,11 @@ import (
// LSMetadata holds the engine-version metadata of an LS resource,
// serialized as XML attributes on the metadata element.
// Fixes duplicate field declarations (Timestamp/Revision appeared twice);
// the shorter names match their use in ReadLSF (res.Metadata.Major, etc.).
type LSMetadata struct {
	//public const uint CurrentMajorVersion = 33;
	Timestamp uint64 `xml:"-"`
	Major     uint32 `xml:"major,attr"`
	Minor     uint32 `xml:"minor,attr"`
	Revision  uint32 `xml:"revision,attr"`
	Build     uint32 `xml:"build,attr"`
}
type format struct {