
Helldivers 2 Model Extraction Help!


Skelethor

Recommended Posts

7 minutes ago, Unordinal said:

I haven't actively done any reversing for Helldivers 2 but I've done some for Vermintide 2 which uses the same base engine. Here's a hash dictionary of the known extensions for that game. I can see that at least the first file type you have there matches the `.bones` file type.
 


ac2b738a374cf583 actor
931e336d7646cc26 animation
dcfb9e18fff13984 animation_curves
44957ef5a3a368e4 animation_set
3eed05ba83af5090 apb
7ffdb779b04e4ed1 baked_lighting
aa5965f03029fa18 bik
e301e8af94e3b5a3 blend_set
18dead01056b72e9 bones
b7893adf7567506a chroma
f7b361bde8b4dd03 common
fe9754bd19814a47 common_package
0001a6d201e87086 compiled_shader
82645835e6b73232 config
69108ded1e3e634b crypto
8fd0d44d20650b68 data
64298613551f58d4 dds
ed74b77c6d447d84 disabled
9831ca893b0d087d entity
92d3ee038eeb610d flow
6a00deed7d3b235a flow_editor
9efe0a916aae7880 font
d526a27da14f1dc5 ini
fa4a8e091a91201e ivf
a62f9297dc969e85 keys
2a690fd348fe9ac5 level
a14e8dfa2cd117e2 lua
eac0b497876adedf material
48ff313713a997a1 mesh
3fcdd69156a46417 mod
b277b11fe4a61d37 mouse_cursor
169de9566953d264 navdata
3b1fa9e8f6bac374 network_config
ad9c6d9ed1e5e77a package
a8193123526fad64 particles
5f7203c8f280dab8 physics
22bd082459e868d8 physics_data
bf21403a3ab0bbb1 physics_properties
46cab355bd8d4d37 prototype
27862fe24795319c render_config
d37e11a77baafc01 resource
9d0a795bfe818d19 scene
cce8d5b5f5ae333f shader
e5ee32a477239a93 shader_library
9e5c3cc74575aeb5 shader_library_group
fe73c7dcff8a7ca5 shading_environment
250e0a11ac8e26f8 shading_environment_mapping
a27b4d04a9ba6f9e slug
e9fc9ea7042e5ec0 slug_album
90641b51c98b7aac sound
d8b27864a97ffdd7 sound_environment
f97af9983c05b950 spu_job
a486d4045106165c state_machine
bb0279e548747a0a statemachine_editor
0d972bab10b40fd3 strings
ad2d3fa30d9ab394 surface_properties
cd4238c6a0c69e32 texture
91f98cecac930de8 timestamp
94616b1760e1c463 timpani
99736be1fff739a4 timpani_bank
00a3e6c59a2b9c6c timpani_master
19c792357c99f49b tome
e0a48d0be9a7453f unit
8c074c8f95e7841b user_settings
f7505933166d6755 vector_field
6ad9817aa72d9533 volume
786f65c00a816b19 wav
535a7bd3e650d799 wwise_bank
af32095c82f2b070 wwise_dep
d50a8b7e1c82b110 wwise_metadata
504b55235d21440e wwise_stream
76015845a6003765 xml
712d6e3dd1024c9c ugg
a99510c6e86dd3c2 upb
2bbcabe5074ade9e input
52014e20a7fdccef xp
7b82d55316f98c9b vista
de8c9d00247f8fc6 w7
0605775c0481181b w8
293bb3b1982c6d5a main
b6b3099f78886ff9 win32
d596cd750e4e279f zD

 

Thank you!

I will just have to figure out how to get that hash dictionary into 010 Editor.
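
For anyone wanting to use that list outside of 010 Editor, here's a minimal C++ sketch of loading it into a lookup map. The file name and the example lookup are just taken from the table above; this isn't code from any of the tools in this thread.

// Rough sketch: load a "hex_hash<space>name" list (like the one quoted above)
// into a map for reverse lookups.
#include <cstdint>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <unordered_map>

std::unordered_map<uint64_t, std::string> load_hash_list(const std::string& path) {
    std::unordered_map<uint64_t, std::string> map;
    std::ifstream file(path);
    std::string line;
    while (std::getline(file, line)) {
        std::istringstream iss(line);
        std::string hash_text, name;
        if (!(iss >> hash_text >> name))
            continue; // skip blank or malformed lines
        map[std::stoull(hash_text, nullptr, 16)] = name;
    }
    return map;
}

int main() {
    auto types = load_hash_list("types.txt"); // hypothetical file holding the list above
    // 0x18dead01056b72e9 is listed as "bones" in that table.
    std::cout << types[0x18dead01056b72e9ull] << "\n";
}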


5 hours ago, h3x3r said:

Is anyone familiar with IDA code? Here is that hash function.

  {
    .maxstack 8
    ldarg.0
    call     instance void [mscorlib]System.Object::.ctor()
    ldarg.0
    newobj   instance void class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::.ctor()
    stfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldarg.0
    ldfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldc.i4.0
    conv.i8
    ldstr    asc_1429A                  // ""
    callvirt instance void class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::set_Item(var<u1>, !!T0)
    ldarg.0
    ldarg.1
    call     instance void Stingray.Console.ReverseIdLookup::ReadFile(string path)
    ret
  }

  .method public hidebysig instance string Lookup(unsigned int64 id)
  {
    .maxstack 8
    ldarg.0
    ldfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldarg.1
    callvirt instance var<u1> class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::get_Item(void)
    ret
  }

  .method public hidebysig instance string LookupOrHex(unsigned int64 id)
                                        // CODE XREF: Stingray.Console.LoadingProfiler__DrawScope+57↑p
                                        // Stingray.Console.LoadingProfiler__DrawUnfinishedScope+3A↑p ...
  {
    .maxstack 8
    ldarg.0
    ldfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldarg.1
    callvirt instance bool class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::ContainsKey(var<u1>)
    brfalse.s loc_A5BB
    ldarg.0
    ldfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldarg.1
    callvirt instance var<u1> class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::get_Item(void)
    ret

loc_A5BB:                               // CODE XREF: Stingray.Console.ReverseIdLookup__LookupOrHex+C↑j
    ldarga.s 1
    ldstr    aX                         // "X"
    call     instance string [mscorlib]System.UInt64::ToString(string)
    ret
  }

  .method private hidebysig instance void ReadFile(string path)
                                        // CODE XREF: Stingray.Console.ReverseIdLookup__.ctor+25↑p
  {
    .maxstack 5
    .locals init (unsigned int8[] V0,
                  int32 V1,
                  int32 V2,
                  string V3,
                  unsigned int8[] V4)
    ldarg.1
    call     bool [mscorlib]System.IO.File::Exists(string)
    brtrue.s loc_A5D9
    ret

loc_A5D9:                               // CODE XREF: Stingray.Console.ReverseIdLookup__ReadFile+6↑j
    ldarg.1
    ldc.i4.3
    call     unsigned int8[] [common]Stingray.FileUtils::ReadAllBytes(string, valuetype [mscorlib]System.IO.FileShare)
    stloc.0
    ldc.i4.0
    stloc.1
    ldc.i4.0
    stloc.2
    br.s     loc_A626

loc_A5E7:                               // CODE XREF: Stingray.Console.ReverseIdLookup__ReadFile+5A↓j
    ldloc.0
    ldloc.2
    ldelem.u1
    brtrue.s loc_A622
    call     class [mscorlib]System.Text.Encoding [mscorlib]System.Text.Encoding::get_UTF8()
    ldloc.0
    ldloc.1
    ldloc.2
    ldloc.1
    sub
    callvirt instance string [mscorlib]System.Text.Encoding::GetString(unsigned int8[], int32, int32)
    stloc.3
    call     class [mscorlib]System.Text.Encoding [mscorlib]System.Text.Encoding::get_UTF8()
    ldloc.3
    callvirt instance unsigned int8[] [mscorlib]System.Text.Encoding::GetBytes(string)
    stloc.s  4
    ldarg.0
    ldfld    class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string> Stingray.Console.ReverseIdLookup::_map
    ldarg.0
    ldloc.s  4
    ldc.i4.0
    call     instance unsigned int64 Stingray.Console.ReverseIdLookup::MurmurHash64(unsigned int8[] data, unsigned int32 seed)
    ldloc.3
    callvirt instance void class [mscorlib]System.Collections.Generic.Dictionary`2<unsigned int64, string>::set_Item(var<u1>, !!T0)
    ldloc.2
    ldc.i4.1
    add
    stloc.1

loc_A622:                               // CODE XREF: Stingray.Console.ReverseIdLookup__ReadFile+1A↑j
    ldloc.2
    ldc.i4.1
    add
    stloc.2

loc_A626:                               // CODE XREF: Stingray.Console.ReverseIdLookup__ReadFile+15↑j
    ldloc.2
    ldloc.0
    ldlen
    conv.i4
    blt.s    loc_A5E7
    ret
  }

  .method private hidebysig instance unsigned int64 MurmurHash64(unsigned int8[] data, unsigned int32 seed)
                                        // CODE XREF: Stingray.Console.ReverseIdLookup__ReadFile+43↑p
  {
    .maxstack 4
    .locals init (int32 V0,
                  unsigned int64 V1,
                  int32 V2,
                  int32 V3,
                  unsigned int8& pinned V4,
                  unsigned int64* V5,
                  unsigned int64 V6,
                  unsigned int8& pinned V7)
    ldarg.1
    ldlen
    conv.i4
    stloc.0
    ldarg.2
    conv.u8
    ldloc.0
    conv.i8
    ldc.i8   0xC6A4A7935BD1E995
    mul
    xor
    stloc.1
    ldloc.0
    ldc.i4.8
    rem
    stloc.2
    ldloc.0
    ldc.i4.8
    div
    stloc.3
    ldloc.3
    ldc.i4.0
    ble.s    loc_A6AD
    ldarg.1
    ldc.i4.0
    ldelema  [mscorlib]System.Byte
    stloc.s  4
    ldloc.s  4
    conv.i
    stloc.s  5
    br.s     loc_A6A6

loc_A660:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+77↓j
    ldloc.s  5
    ldind.i8
    stloc.s  6
    ldloc.s  6
    ldc.i8   0xC6A4A7935BD1E995
    mul
    stloc.s  6
    ldloc.s  6
    ldloc.s  6
    ldc.i4.s 0x2F
    shr.un
    xor
    stloc.s  6
    ldloc.s  6
    ldc.i8   0xC6A4A7935BD1E995
    mul
    stloc.s  6
    ldloc.1
    ldloc.s  6
    xor
    stloc.1
    ldloc.1
    ldc.i8   0xC6A4A7935BD1E995
    mul
    stloc.1
    ldloc.3
    ldc.i4.1
    sub
    stloc.3
    ldloc.s  5
    ldc.i4.8
    add
    stloc.s  5

loc_A6A6:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+2E↑j
    ldloc.3
    brtrue.s loc_A660
    ldc.i4.0
    conv.u
    stloc.s  4

loc_A6AD:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+1E↑j
    ldloc.2
    ldc.i4.0
    ble      loc_A74C
    ldarg.1
    ldc.i4.8
    ldloc.0
    ldc.i4.8
    div
    mul
    ldelema  [mscorlib]System.Byte
    stloc.s  7
    ldloc.2
    ldc.i4.1
    sub
    switch   loc_A734, loc_A728, loc_A71B, loc_A70E, loc_A701, loc_A6F4, loc_A6E7
    br.s     loc_A748

loc_A6E7:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.6
    add
    ldind.u1
    conv.u8
    ldc.i4.s 0x30
    shl
    xor
    stloc.1

loc_A6F4:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.5
    add
    ldind.u1
    conv.u8
    ldc.i4.s 0x28
    shl
    xor
    stloc.1

loc_A701:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.4
    add
    ldind.u1
    conv.u8
    ldc.i4.s 0x20
    shl
    xor
    stloc.1

loc_A70E:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.3
    add
    ldind.u1
    conv.u8
    ldc.i4.s 0x18
    shl
    xor
    stloc.1

loc_A71B:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.2
    add
    ldind.u1
    conv.u8
    ldc.i4.s 0x10
    shl
    xor
    stloc.1

loc_A728:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldc.i4.1
    add
    ldind.u1
    conv.u8
    ldc.i4.8
    shl
    xor
    stloc.1

loc_A734:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+94↑j
    ldloc.1
    ldloc.s  7
    conv.i
    ldind.u1
    conv.u8
    xor
    stloc.1
    ldloc.1
    ldc.i8   0xC6A4A7935BD1E995
    mul
    stloc.1

loc_A748:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+B5↑j
    ldc.i4.0
    conv.u
    stloc.s  7

loc_A74C:                               // CODE XREF: Stingray.Console.ReverseIdLookup__MurmurHash64+7F↑j
    ldloc.1
    ldloc.1
    ldc.i4.s 0x2F
    shr.un
    xor
    stloc.1
    ldloc.1
    ldc.i8   0xC6A4A7935BD1E995
    mul
    stloc.1
    ldloc.1
    ldloc.1
    ldc.i4.s 0x2F
    shr.un
    xor
    stloc.1
    ldloc.1
    ret
  }
}
}

 

 

Why are you loading this in IDA? This looks suspiciously like .NET IL code, especially given that `mscorlib` reference.


12 hours ago, orion42m said:

How would I go about running .bt files with 010 Editor?

In the menu bar you can select Templates -> Run Template, then select the matching .bt file for the file you want to inspect.

11 hours ago, Unordinal said:

Here's a hash dictionary of the known extensions for that game. I can see that at least the first file type you have there matches the `.bones` file type.

Thanks, with this I was able to discover more about the files themselves, and got my exporter to export everything.

8 hours ago, Myrkur said:

anyone else getting some rather "cursed" imports of the meshes?

Those are LoDs. It's unclear if/how we can tell which is a LoD for what so far.


I've modified Hellextractor quite a bit, and it now dumps all the files from the /data directory directly, instead of needing to go through QuickBMS. No more random 8 bytes at the start of random files, which was super annoying to deal with. It exported the entire content of the game, 17.0 GiB, in just under 50 seconds - much faster than QuickBMS had any chance to be.

Right now it just dumps the contents of meta, stream and gpu in a sequence, so more handling is necessary for it to work fully. But with this I've been able to somewhat piece together that each of the file types has some sort of special header at the start, which seems to tell us something about the file itself. For example, this is what I figured out about the TrueType Font content header:

// Always the same as the files own name hash.
uint64_t my_own_name_hash; 
// If no alternative font file:
// 00 00 00 00 00 00 80 3F
// If alternative font file:
// 87 88 08 C0 8B 88 88 3F
uint32_t __unk; 
uint32_t __unk;
uint32_t __unk;
uint32_t __unk;
uint64_t different_font_name_hash;

do {
  uint32_t count;
  uint32_t type;
  ##type## entries[count];
} while (count != 0)
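
As a compilable restatement of those notes (just a sketch: field meanings are still guesses, little-endian is assumed, and the chunk loop only reads count/type because the per-type entry size isn't known yet; the raw sample headers below are what this describes):

#include <cstdint>
#include <cstdio>

#pragma pack(push, 1)
struct ttf_header {
    uint64_t own_name_hash;            // always the file's own name hash
    uint32_t unk[4];                   // the 00..80 3F / 87 88 08 C0 8B 88 88 3F pattern sits in the first 8 of these bytes
    uint64_t different_font_name_hash; // alternative font, when one exists
};
#pragma pack(pop)

int main() {
    FILE* f = std::fopen("example.font", "rb"); // hypothetical extracted .font content
    if (!f) return 1;
    ttf_header hdr{};
    std::fread(&hdr, sizeof(hdr), 1, f);
    // Chunk list: (count, type) pairs, terminated by count == 0.
    uint32_t count = 0, type = 0;
    while (std::fread(&count, 4, 1, f) == 1 && std::fread(&type, 4, 1, f) == 1) {
        std::printf("chunk type %08x, count %u\n", type, count);
        if (count == 0)
            break;
        // entries[count] would follow here; a real reader needs to know the
        // per-type entry size to skip or parse them, which is still unknown.
        break;
    }
    std::fclose(f);
}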


// Shortest header found, file is basically empty.
C6 3C 13 E2 58 3D 1B 65 00 00 00 00 00 00 80 3F
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00 00 00 00 28 00 00 00

// The average header.
C0 E7 1D 12 3C 53 27 53 87 88 08 C0 8B 88 88 3F
3F 00 00 00 00 00 00 00 CC E8 77 49 D4 BD 15 22
01 00 00 00 28 00 00 00 00 40 5B 44 00 80 BA C3
00 40 9C 44 40 00 00 00 00 00 00 00 00 00 00 00

// The longest header encountered so far.
DD 17 83 34 E5 2C 8E 81 87 88 08 C0 8B 88 88 3F
3F 00 00 00 00 00 00 00 69 FB CF 7A 07 EB 33 C0
05 00 00 00 28 00 00 00 00 40 5B 44 00 80 BA C3
00 40 9C 44 80 00 00 00 00 40 5F 44 00 00 FE C2
00 00 FF 44 D0 F2 00 00 00 C0 71 44 00 00 7D C3
00 80 98 44 E0 3C 2C 00 00 80 6D 44 00 00 75 C3
00 60 95 44 70 20 93 00 00 00 5C 44 00 00 F0 C2
00 00 FA 44 50 76 CA 00 00 00 00 00 00 00 00 00

I do not yet understand what all of this means, but it's successfully allowed me to extract the .ttf content from the engine-specific .ttf containers. Different file types have different headers, for example:

  • .bik: 16 byte header that seemingly always is 0xE9 0x03 0x00[14].
  • .animation: 16 byte header, of which the last 4 bytes are the length of the file. Rest is unknown so far.
  • .bones: Some common bytes, but no noticeable header information.
  • .config: 8 common bytes. Nothing else noticeable.
  • .entity: No header, straight up contains content only.
  • .font: No noticeable information.
  • .level: Probably just the file itself, since all of them start with "IM".
  • .material: Same as above, seems to be straight up content.
  • .mouse_cursor: Same as above.
  • .network_config: Same as above.
  • .package: 16 byte header, but probably just content. Bytes 8 to 12 are the number of elements contained.
  • .particles: Same as .material.
  • .physics: Some common bytes between files. No noticeable structure though. Likely just content.
  • .shader_library: Straight up content.
  • .shader_library_group: Just content.
  • .shading_environment: Same.
  • .shading_environment_mapping: Same.
  • .state_machine: Same.
  • .strings: No noticeable header. Seems to contain a variable-length Unicode format, likely UTF-16. Might be compressed beyond a certain size.
  • .texture: Has a 192 byte header. It is unclear why the content for this file is in both .stream and .gpu_resources at once. Unlike .font, header size is constant. "Corrupted" DDS files are BC7_UNORM R16G16B16A16_FLOAT textures, something that is hardly ever used and requires a reasonably recent version of the DDS importer - something many tools and engines do not have.
  • .unit: Unclear so far. The engine may actually consider this to be separate files as the offsets in the meta section are relative to the start of the stream/gpu section, not relative to the entire thing.
  • .wwise_bank, .wwise_stream, .wwise_dep: Content only.

The hash list @Unordinal provided was really helpful. I'll now switch to reimplementing the MurmurHash64 that Stingray uses.


42 minutes ago, Xaymar said:
  • .texture: Has a 192 byte header. It is unclear why the content for this file is in both .stream and .gpu_resources at once. Unlike .font, header size is constant. "Corrupted" DDS files are BC7_UNORM R16G16B16A16_FLOAT textures, something that is hardly ever used and requires a reasonably recent version of the DDS importer - something many tools and engines do not have.

My guess would be the stream is the main texture data, and gpu_resources is mipmaps texture data.


7 hours ago, Helldiver said:

My guess would be the stream is the main texture data, and gpu_resources is mipmaps texture data.

I wish - that'd make it much easier. Unfortunately, DDS inspection tools show that when you remove the 0xC0-byte header and only export Meta+Stream, only about two-thirds of the file ends up being used. That shrinks to one-third when you export Meta+Stream+GPU as one section. And neither section appears to repeat the other, which makes figuring out what it actually does rather difficult.

Edit: Comparing with some raw input files that were sent to me, any file containing pure text is "compiled" down to a more optimal distribution format. A lot of data appears to be lost.

Edit 2: Hashes for file types are without the ".", so '.wwise_stream' is hashed as 'wwise_stream'. It's an unseeded murmurhash64a, super easy to implement. Runs in microseconds, so I could theoretically brute force matches for each hash.
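
For reference, here's a minimal C++ sketch of MurmurHash64A with seed 0. The constants match what's visible in the IL dump above (multiplier 0xC6A4A7935BD1E995, shift 47), and hashing "wwise_stream" should print 504b55235d21440e if it agrees with the extension table posted earlier. This is just a sketch, not the engine's code.

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <string>

// MurmurHash64A; the type/name ids appear to use seed 0.
uint64_t murmurhash64a(const void* key, size_t len, uint64_t seed = 0) {
    const uint64_t m = 0xc6a4a7935bd1e995ull;
    const int r = 47;
    uint64_t h = seed ^ (len * m);

    const uint8_t* data = static_cast<const uint8_t*>(key);
    const uint8_t* end  = data + (len / 8) * 8;
    while (data != end) {
        uint64_t k;
        std::memcpy(&k, data, 8); // unaligned-safe 64-bit read, little-endian assumed
        data += 8;
        k *= m; k ^= k >> r; k *= m;
        h ^= k; h *= m;
    }

    // Tail (remaining 1..7 bytes), same shape as the switch in the IL above.
    switch (len & 7) {
        case 7: h ^= uint64_t(data[6]) << 48; [[fallthrough]];
        case 6: h ^= uint64_t(data[5]) << 40; [[fallthrough]];
        case 5: h ^= uint64_t(data[4]) << 32; [[fallthrough]];
        case 4: h ^= uint64_t(data[3]) << 24; [[fallthrough]];
        case 3: h ^= uint64_t(data[2]) << 16; [[fallthrough]];
        case 2: h ^= uint64_t(data[1]) << 8;  [[fallthrough]];
        case 1: h ^= uint64_t(data[0]);       h *= m;
    }

    h ^= h >> r; h *= m; h ^= h >> r;
    return h;
}

int main() {
    std::string s = "wwise_stream"; // note: hashed without the leading '.'
    std::printf("%016llx\n", (unsigned long long)murmurhash64a(s.data(), s.size()));
}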

Edit 3: Spent an ungodly amount of CPU power on this:

Depth: 1
Depth: 2
  00DE8C9D00247F8FC6 = w7
  000605775C0481181B = w8
  0052014E20A7FDCCEF = xp
  00D596CD750E4E279F = zD
Depth: 3
  003EED05BA83AF5090 = apb
  00AA5965F03029FA18 = bik
  00D526A27DA14F1DC5 = ini
  00FA4A8E091A91201E = ivf
  00A14E8DFA2CD117E2 = lua
  003FCDD69156A46417 = mod
  00712D6E3DD1024C9C = ugg
  00A99510C6E86DD3C2 = upb
  00786F65C00A816B19 = wav
  0076015845A6003765 = xml
Depth: 4
  008FD0D44D20650B68 = data
  0092D3EE038EEB610D = flow
  009EFE0A916AAE7880 = font
  00A62F9297DC969E85 = keys
  00293BB3B1982C6D5A = main
  0048FF313713A997A1 = mesh
  00A27B4D04A9BA6F9E = slug
  0019C792357C99F49B = tome
  00E0A48D0BE9A7453F = unit
Depth: 5
  00AC2B738A374CF583 = actor
  0018DEAD01056B72E9 = bones
  00D7014A50477953E0 = cloth
  002BBCABE5074ADE9E = input
  002A690FD348FE9AC5 = level
  009D0A795BFE818D19 = scene
  0090641B51C98B7AAC = sound
  007B82D55316F98C9B = vista
  00B6B3099F78886FF9 = win32

I could let this run for unreasonable amounts of time (the time spent grows roughly as O(validchars^depth)), but it's pretty much confirmed that the hash function I made is identical.

Edited by Xaymar

2 hours ago, Xaymar said:

Edit 2: Hashes for file types are without the ".", so '.wwise_stream' is hashed as 'wwise_stream'. It's an unseeded murmurhash64a, super easy to implement. Runs in microseconds, so I could theoretically brute force matches for each hash. [...]

I am sorry to be a bother, but how would I use the hashes, or is it something that would only be used in the extractor?

 

Sorry, I just found out that I can't utilize this; it would really only be useful for making the extractor.

Edited by orion42m

[screenshot of the string scan results]

Out of 1.7 GiB of strings detected, only ~100 KiB ended up being Stingray related, and only some actually match. But this means that we will be able to restore the file structure once I get around to implementing that part.

  

48 minutes ago, orion42m said:

I am sorry to be a bother, but how would I use the hashes, or is it something that would only be used in the extractor?

 

Sorry, I just found out that I can't utilize this; it would really only be useful for making the extractor.

Yes, the hashes themselves are not exactly useful without a tool to convert them back into data, which can be done by modifying the QuickBMS script if you feel like it. Though at this point, it might be much easier to just wait until Hellextractor does it for you 🙂

Edited by Xaymar

11 hours ago, Xaymar said:

[...] The hash list @Unordinal provided was really helpful. I'll now switch to reimplementing the MurmurHash64 that Stingray uses.

Glad it helped! I have a VT2 repo, done in C# - code's a bit of a mess and the differences between the two games might mean most of it is useless for Helldivers but I'll leave it here just in case it's of some use: https://github.com/Unordinal/VT2Lib/

I used it to extract some models with rigs from VT2, but VT2 doesn't have the `.gpu_resources` files and the format's probably diverged quite a bit, since `.unit` files held the meshes/scene data/bones/etc for that game.


2 hours ago, Xaymar said:

Yes, the hashes themselves are not exactly useful without a tool to convert them back into data, which can be done by modifying the QuickBMS script if you feel like it. Though at this point, it might be much easier to just wait until Hellextractor does it for you 🙂

Thank you, that is probably the case; however, I will still try to take the time and effort to absorb what information I can, so I might be able to be helpful in future endeavors.


Great news, everyone! Hellextractor is now at a reasonably advanced point, and can be used for more of what it was originally intended for. Lots of thanks to everyone here, as well as the contributors on other platforms. It does not yet handle file conversion, but with the new modular code, more functionality is not far off.

Usage: hellextractor <mode> ...
It'll print the help if you provide an invalid mode, or if you pass -h as the first argument to a mode.

Features

  • Generate hashes for arbitrary text using the "hash" mode.
  • Extract files from container files using the "extract" mode.
    • Filter input files using the new "-i" option.
    • Filter output files using the new "-f" option.
    • Provide a Type Hash Translation file using the new "-t" option.
    • Provide a Name Hash Translation file using the new "-n" option.
    • Provide a generic String Hash Translation file using the new "-s" option.
    • Make the whole thing only print text and not actually do anything, using the new "-d" option. (Dry run!)
  • Hash Translation files now allow converting hashes back into normal text; sample files are provided here.

Also thanks to ThePotato97 on GitHub for automating the build process. If you want a more up-to-date version, log in on GitHub, open the Actions tab, find the most recent build, and click the asset to download.

Download: on GitHub

 

 

Edited by Xaymar
Downloads moved to Github so I don't have to edit this so often. No account needed anymore!

47 minutes ago, Xaymar said:

Great news, everyone! Hellextractor is now at a reasonably advanced point, and can be used for more of what it was originally intended for. [...]

Example: extract all files and use translation files to convert back to normal names:
Hellextractor.exe extract -o output -t types.txt -n files.txt -s strings.txt "D:\Games\Steam\steamapps\common\Helldivers 2\data"

Do remember that the DDS images can be array images; if you're converting those to PNG, you will need to split the array slices out of the original DDS file and convert each of them to PNG as well.


1 hour ago, Xaymar said:

Great news, everyone! Hellextractor is now at a reasonably advanced point, and can be used for more of what it was originally intended for.

I grabbed the tool you attached to your latest post, placed the most recent files/strings/types.txt files from your GitHub next to it, and used the command

Hellextractor.exe extract -o output -t types.txt -n files.txt -s strings.txt "[path_to_data_folder]"

to extract and rename the tracks. 51600 got exported (17 GB), but none of those files got renamed. In addition, the files don't look proper - their headers don't appear at the start. Here is a screenshot of two files: [screenshot]

Does anybody have the same problem?


8 hours ago, Xaymar said:

Edit 3: Spent an ungodly amount of CPU power on this:

Depth: 2
  00DE8C9D00247F8FC6 = w7
  000605775C0481181B = w8
  0052014E20A7FDCCEF = xp
  00D596CD750E4E279F = zD
[...]

Some of these I have not seen in the game files, such as 00DE8C9D00247F8FC6. What file do you see it in?


1 hour ago, Xaymar said:

Great news, everyone! Hellextractor is now at a reasonably advanced point, and can be used for more of what it was originally intended for. [...]

Why are texture files labeled as .texture and not .dds? They are all DDS textures, nothing more.


27 minutes ago, FunnyML said:

Does anybody have the same problem?

We have a minuscule fraction of the file names, but we have most of the type names, so this is normal. The game does not use file names at all, it only uses hashes - so we have to reverse the hashes back into names.

12 minutes ago, Helldiver said:

Some of these I have not seen in the game files, such as 00DE8C9D00247F8FC6. What file do you see it in?

The 00 at the start is an error - I typo'd a 16 into an 18. These are also just the types mentioned all over the thread.

6 minutes ago, GameBreaker said:

Why are texture files labeled as .texture and not .dds? They are all DDS textures, nothing more.

Metadata. There's more to them, that's why. It extracts data as-is, without conversion for now. Edit: Also, because the actual file type is ".texture", the hash is a 100% match. It's not just a .dds file, which has a completely different hash.

Edited by Xaymar

21 minutes ago, Xaymar said:

The 00 at the start is an error - I typo'd a 16 into an 18. These are also just the types mentioned all over the thread.

What I meant was, I scanned all package files for all file hashes and type hashes. I only see 45 type hashes:

05106b81dcd58a13
0d972bab10b40fd3 strings
18dead01056b72e9 bones
1d59bd6687db6b33
250e0a11ac8e26f8 shading_environment_mapping
27862fe24795319c render_config
2a0a70acfe476e1d
2a690fd348fe9ac5 level
3b1fa9e8f6bac374 network_config
504b55235d21440e wwise_stream
535a7bd3e650d799 wwise_bank
57a13425279979d7
5f7203c8f280dab8 physics
5fdd5fe391076f9f
6592b918e67f082c
7910103158fc1de9
82645835e6b73232 config
9199bb50b6896f02
92d3ee038eeb610d flow
931e336d7646cc26
9831ca893b0d087d entity
9e5c3cc74575aeb5 shader_library_group
9efe0a916aae7880 font
a14e8dfa2cd117e2 lua
a486d4045106165c state_machine
a8193123526fad64 particles
aa5965f03029fa18 bik
ab2f78e885f513c6 prefab
ad9c6d9ed1e5e77a package
af32095c82f2b070 wwise_dep
b277b11fe4a61d37 mouse_cursor
b8fd4d2cede20ed7
c4f0f4be7fb0c8d6
cd4238c6a0c69e32 texture
d50a8b7e1c82b110 wwise_metadata
d7014a50477953e0 cloth
e0a48d0be9a7453f unit
e3f2851035957af5
e5ee32a477239a93
e985c5f61c169997 speedtree
eac0b497876adedf material
f7505933166d6755 vector_field
f7a09f8bb35a1d49
fcaaf813b4d3cc1e
fe73c7dcff8a7ca5 shading_environment

Edit: I see my mistake now. I see https://github.com/Xaymar/Hellextractor/blob/ac0076676060df95a855696ea6e558ceb4ff4bb9/source/hd2_data.hpp#L51 shows you kept VT2 types that do not exist in Helldivers 2.

Edited by Helldiver

25 minutes ago, Helldiver said:

What I meant was, I scanned all package files for all file hashes and type hashes. I only see 45 type hashes:

25 minutes ago, Helldiver said:

Edit: I see my mistake now. I see https://github.com/Xaymar/Hellextractor/blob/ac0076676060df95a855696ea6e558ceb4ff4bb9/source/hd2_data.hpp#L51 shows you kept VT2 types that do not exist in Helldivers 2.

The enumeration there isn't used in actual decisions anymore, only translation tables are used. You're free to pick whichever types.txt you want. I don't really see a reason to restrict the types.txt to only things actually found in Helldivers 2.

Edit: Additionally I already mentioned that in my first reply.

Edited by Xaymar

Just now, Xaymar said:

The enumeration there isn't used in actual decisions anymore, only translation tables are used. You're free to pick whichever types.txt you want. I don't really see a reason to restrict the types.txt to only things actually found in Helldivers 2.

Just confusion on my part - I thought that since you kept them, they were in Helldivers 2 somewhere and that my code was not working properly.


For those that love re-exporting content, the tool now supports renaming files!
 

Statistics:
    Exported: 0
    Renamed:  9
    Deleted:  0
    Skipped:  715
    Filtered: 50885

Whenever it detects that a newer name is available for a file, and the existing file matches the expected size, it'll rename instead. If the target file already exists, it'll delete the old file too, so no duplicate files in your export. It also now fixes my Big Endian / Little Endian mixup.
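
Roughly, the rename pass boils down to something like the sketch below (std::filesystem pseudologic for what's described above, not the actual tool code; paths and sizes are made up).

#include <cstdint>
#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

// If a file was previously exported under its hash name and we now know a
// better (translated) name, rename it instead of re-exporting - but only if
// the existing file matches the expected size. If the target already exists,
// drop the old copy so there are no duplicates.
bool rename_if_known(const fs::path& old_name, const fs::path& new_name, std::uintmax_t expected_size) {
    if (!fs::exists(old_name) || fs::file_size(old_name) != expected_size)
        return false; // nothing to rename, or size mismatch -> re-export instead
    if (fs::exists(new_name)) {
        fs::remove(old_name);
        return true;
    }
    if (!new_name.parent_path().empty())
        fs::create_directories(new_name.parent_path());
    fs::rename(old_name, new_name);
    return true;
}

int main() {
    // Hypothetical example: a hash-named export gets its translated name.
    bool renamed = rename_if_known("output/0123456789abcdef.bones",
                                   "output/units/some_unit.bones", 12345);
    std::cout << (renamed ? "renamed" : "skipped") << "\n";
}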

Edit: For what it's worth, the current files.txt could be used to mount a massive dictionary attack on the hashes. It'll cut down on time required significantly, but not really by much. Just split all the strings by / and _, then merge arbitrarily until you find a match. For numbers, increment up to a reasonable amount (single digit is likely to end at 9, double at probably 99). If you know the type of the name hash, you can also do more specific trickery, like using keywords related to that type (_albedo _normal _nms _displacement)
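
A rough sketch of what such a dictionary attack could look like is below; the word list and the target hash are placeholders, and the hash function is the same MurmurHash64A sketched a few posts up.

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <string>
#include <unordered_set>
#include <vector>

// Same MurmurHash64A (seed 0) as in the earlier sketch, condensed.
uint64_t murmurhash64a(const void* key, size_t len, uint64_t seed = 0) {
    const uint64_t m = 0xc6a4a7935bd1e995ull;
    const int r = 47;
    uint64_t h = seed ^ (len * m);
    const uint8_t* p = static_cast<const uint8_t*>(key);
    const uint8_t* end = p + (len / 8) * 8;
    for (; p != end; p += 8) {
        uint64_t k;
        std::memcpy(&k, p, 8);
        k *= m; k ^= k >> r; k *= m;
        h ^= k; h *= m;
    }
    for (size_t i = len & 7; i > 0; --i)
        h ^= uint64_t(p[i - 1]) << ((i - 1) * 8);
    if (len & 7) h *= m;
    h ^= h >> r; h *= m; h ^= h >> r;
    return h;
}

int main() {
    // Words split out of the known names in files.txt (placeholders here).
    std::vector<std::string> words = {"content", "units", "some_unit", "armor", "01"};
    std::vector<std::string> seps  = {"_", "/"};
    // Unresolved name hashes pulled from the archives (placeholder value).
    std::unordered_set<uint64_t> targets = {0x0123456789abcdefull};

    // Depth-2 combinations: word, separator, word. A real run would recurse deeper.
    for (const auto& a : words)
        for (const auto& sep : seps)
            for (const auto& b : words) {
                std::string candidate = a + sep + b;
                uint64_t h = murmurhash64a(candidate.data(), candidate.size());
                if (targets.count(h))
                    std::printf("%016llx = %s\n", (unsigned long long)h, candidate.c_str());
            }
}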

Edited by Xaymar

Possible suffixes for textures.


__tex_NAR
__tex_albedo
__tex_albedo_blend
__tex_albedo_blend_tex
__tex_albedo_color
__tex_albedo_flag
__tex_albedo_iridescence
__tex_albedo_tex
__tex_albedo_wear
__tex_albedoopacity_01
__tex_anterior_chamber_heightmap
__tex_anterior_chamber_normal
__tex_ao_heightmap
__tex_ao_map
__tex_atlas_tex
__tex_atmospheric_scattering_color
__tex_atmospheric_scattering_transmittance
__tex_background_alpha
__tex_background_texture
__tex_badge_flipbook
__tex_baker_material_atlas
__tex_base_color
__tex_base_color_metal_map
__tex_base_color_scrolled
__tex_base_data
__tex_base_map
__tex_base_mask
__tex_base_normal_ao
__tex_base_normal_ao_dirt
__tex_base_normal_ao_subsurface
__tex_basemap_highlands
__tex_basemap_lowlands
__tex_bca_tex
__tex_beam_tex_01
__tex_bgtexture
__tex_biome_a_66e90b33
__tex_biome_b_66e90b33
__tex_blocker_mask_target
__tex_blood_lut
__tex_blood_overlay_normal_grayscale
__tex_blood_scalar_field
__tex_blood_splatter_tiler
__tex_blood_tiler
__tex_bluenoise_texture
__tex_brdf_lut
__tex_brush_texture
__tex_bubble_tex
__tex_bubble_tex_02
__tex_bug_splatter_tiler
__tex_cape_gradient
__tex_cape_lut
__tex_cape_scalar_fields
__tex_cape_tear
__tex_cctv_feed
__tex_clearcoat_norm_xy_intensity_roughness_map
__tex_clearcoat_roughness_ao_spec
__tex_cliffs_target
__tex_cloaking_noise
__tex_closest_reflection_map
__tex_cloth_detail_normal
__tex_cloud_mask
__tex_color_lut
__tex_color_map
__tex_color_roughness
__tex_color_specular_b
__tex_color_tint_lookup
__tex_color_tint_mask_map
__tex_color_variation_mask
__tex_color_variation_mra
__tex_color_variation_normal
__tex_combined_fog_volumes_color_density
__tex_combined_fog_volumes_parameters
__tex_composite_array
__tex_concrete_sampler
__tex_continents_LUT
__tex_continents_texture_map
__tex_corporate_color_roughness_lut
__tex_cosmic_dust_lut
__tex_covering_albedo
__tex_covering_normal
__tex_crater_texture
__tex_cubemap
__tex_current_exposure
__tex_customization_camo_tiler_array
__tex_customization_material_detail_tiler_array
__tex_damage_tiler_data
__tex_damage_tiler_derivatives
__tex_data_map_fad03be1
__tex_data_tex
__tex_data_tex_02
__tex_data_texture
__tex_decal_sheet
__tex_deformable_terrain_mask
__tex_depth_color_lookup
__tex_depth_stencil_buffer
__tex_detail_mask_
__tex_detail_normal_1
__tex_detail_tex
__tex_diffuse_map
__tex_dir_map_fad03be1
__tex_dirt_map
__tex_displacement_map
__tex_displacement_tex
__tex_dist_tex
__tex_distortion_map
__tex_distortion_tex
__tex_distress
__tex_distress_texture
__tex_earth_continents
__tex_emissive
__tex_emissive_color
__tex_emissive_f_stop_10_intensity_map
__tex_emissive_map
__tex_emissive_mask
__tex_emissive_nebula_lut
__tex_emissive_pack
__tex_emissive_planet
__tex_emissive_texture
__tex_emissivemask
__tex_erode_map
__tex_erode_tex
__tex_erode_texture
__tex_eye_lut
__tex_far_fog
__tex_far_fog_div4_clouds
__tex_far_fog_div4_clouds_history
__tex_far_fog_div4_history
__tex_far_shadows
__tex_far_shadows_volume
__tex_fill_texture
__tex_flare_noise
__tex_flash_mask
__tex_flash_mask_02
__tex_flatten_texture
__tex_flattening_lookup
__tex_flicker_tex
__tex_flow_map_fad03be1
__tex_fog_volume_particle_texture
__tex_footstep_tiler
__tex_galaxy_dust
__tex_galaxy_shape
__tex_galaxy_stars
__tex_gas_giant_lookup
__tex_gbuffer0
__tex_gbuffer1
__tex_gbuffer1_copy
__tex_gbuffer2
__tex_gbuffer3
__tex_gbuffer_emissive
__tex_generated_global_shader_input
__tex_generated_heightmap
__tex_generated_heightmap_f
__tex_generated_heightmap_offset
__tex_generated_heightmap_properties
__tex_generated_heightmap_properties_height
__tex_generated_heightmap_slope
__tex_generated_heightmap_source
__tex_generated_heightmap_tmp_div4
__tex_generated_heightmap_unorm_div2
__tex_generated_materials
__tex_generated_minimap
__tex_generated_minimap_slot
__tex_generated_per_zone_shader_input
__tex_generated_route_distance
__tex_generated_terrain_albedo
__tex_generated_water_replace_lookup
__tex_glass_data
__tex_glint_sample
__tex_glint_sample_0dfb75cf
__tex_glint_sample_61c13523
__tex_global_diffuse_map
__tex_global_lens_dirt_map
__tex_global_radiance_map
__tex_global_specular_map
__tex_gradient
__tex_gradient_map
__tex_gradient_texture
__tex_graph_integral_texture
__tex_grid_texture_map
__tex_ground_albedo
__tex_ground_nar
__tex_grunge_mask
__tex_hdr0
__tex_hdr0_div4_fullres
__tex_hdr1
__tex_hdr2
__tex_hdr_ssr
__tex_height_noise
__tex_height_sample_247fa7cb
__tex_heightmap_highlands
__tex_heightmap_lowlands
__tex_high_altitude_clouds_color
__tex_high_altitude_clouds_color_probe
__tex_hmap
__tex_hologram_cylinder_texture_map
__tex_hologram_pattern
__tex_id_masks_array
__tex_ies_lookup
__tex_input_image
__tex_input_texture
__tex_input_texture0
__tex_input_texture1
__tex_input_texture2
__tex_input_texture3
__tex_input_texture4
__tex_input_texture5
__tex_input_texture6
__tex_input_texture7
__tex_input_texture8
__tex_iris_tiler
__tex_lens_cutout_texture
__tex_lens_emissive_texture
__tex_lens_occlusion_texture
__tex_light_bleed_map
__tex_light_probe_space_specular
__tex_linear_depth
__tex_linear_depth_mip6
__tex_local_lights_shadow_atlas
__tex_lut_emissive
__tex_mask
__tex_mask_atlas_tex
__tex_mask_tex
__tex_masktexture
__tex_material_lut
__tex_material_map
__tex_material_tiler
__tex_metallic_map
__tex_minimap_base_color
__tex_minimap_blocker_target0
__tex_minimap_blocker_target1
__tex_minimap_heightmap
__tex_minimap_lut
__tex_minimap_metallic_subsurface_roughness_specular
__tex_minimap_normal_wetness
__tex_minimap_routes
__tex_minimap_subsurface_color
__tex_moon_lut
__tex_motion_vectors
__tex_mra
__tex_mrae
__tex_msdf_texture
__tex_mud_normals_grayscale
__tex_nac
__tex_nar_tex
__tex_nar_texture
__tex_nms_tex
__tex_noise_01
__tex_noise_01_texture
__tex_noise_02
__tex_noise_02_tex
__tex_noise_3d
__tex_noise_array
__tex_noise_map_01
__tex_noise_map_02
__tex_noise_normal
__tex_noise_pack
__tex_noise_pack_01
__tex_noise_pack_02
__tex_noise_tex
__tex_noise_tex_01
__tex_noise_tex_02
__tex_noise_texture
__tex_normal
__tex_normal_ao_roughness
__tex_normal_map
__tex_normal_map_01
__tex_normal_map_02
__tex_normal_map_with_alpha
__tex_normal_opacity
__tex_normal_specular_ao
__tex_normal_xy_ao_rough_map
__tex_normal_xy_roughness_opacity
__tex_normals
__tex_nrm_01
__tex_nrm_02
__tex_offset_01
__tex_offset_02
__tex_offset_noise
__tex_offset_texture
__tex_opacity_clip_map
__tex_opacity_map
__tex_outside_map_vista_heightmap
__tex_outside_map_vista_heightmap_frequency_map
__tex_overlay_texture
__tex_overlay_texture_map
__tex_palette_lut
__tex_parallax_map
__tex_pattern_lut
__tex_pattern_masks_array
__tex_planet_noise
__tex_playercard_texture
__tex_prev_linear_depth_mip6
__tex_primary_color_variation_normal_mr
__tex_primary_material_albedo
__tex_primary_material_mask
__tex_pupil_heightmap
__tex_pupil_normal
__tex_reflection_map
__tex_reticle_texture
__tex_ripples_tiler
__tex_road_data_strip
__tex_road_direction_target
__tex_roughness_map
__tex_scatter_albedo_opacity
__tex_scatter_albedo_opacity_array
__tex_scatter_comparision_density
__tex_scatter_density
__tex_scatter_density_map
__tex_scatter_lookup
__tex_scatter_normal_array
__tex_scatter_rsh_array
__tex_scatter_subsurface_array
__tex_scenario_overlay_texture
__tex_sclar_field_opacity
__tex_scorch_marks
__tex_screen_effect_data_texture
__tex_screen_video_texture_b
__tex_screen_video_texture_r
__tex_screen_video_texture_y
__tex_scroll_noise
__tex_sdf
__tex_secondary_map
__tex_secondary_material_mask
__tex_shadow_minimap_heightmap
__tex_shadow_opacity
__tex_ship_hub_specular_array
__tex_ship_hub_specular_lerp_from_array
__tex_ship_hub_specular_lerp_to_array
__tex_ship_video_texture_b
__tex_ship_video_texture_r
__tex_ship_video_texture_y
__tex_skydome_map
__tex_slot_0
__tex_smoke_pack
__tex_snow_glints_tiler
__tex_snow_mask_texture
__tex_snow_mask_texture_01
__tex_snow_pnrb_array
__tex_space_probe_backdrop
__tex_space_star_lut
__tex_space_star_lut_tmp
__tex_spec_iri_intensity_iri_thickness_map
__tex_specular_brdf_lut
__tex_ssao_buffer
__tex_sss_lut
__tex_subsurface_opacity
__tex_sun_flare_image
__tex_sun_flare_visibility_lookup
__tex_sun_flare_visibility_lookup_sum
__tex_sun_shadow_map
__tex_survey_qr_code
__tex_sweep_tex
__tex_sweep_texture
__tex_tear_map
__tex_tear_normals_grayscale
__tex_terrain_blending_data
__tex_terrain_displacement_map
__tex_terrain_height_mask
__tex_terrain_trample
__tex_terrain_transparent
__tex_tex
__tex_tex0
__tex_tex1
__tex_tex2
__tex_tex3
__tex_tex4
__tex_tex_01
__tex_tex_02
__tex_tex_mask
__tex_tex_normals
__tex_texture_data
__tex_texture_grayscale_map
__tex_texture_map
__tex_texture_map_01
__tex_texture_map_02
__tex_texture_map_0b1b5dad
__tex_texture_map_166acea5
__tex_texture_map_2252c011
__tex_texture_map_2a3f3fe6
__tex_texture_map_2cfaf55e
__tex_texture_map_319d3bb5
__tex_texture_map_365947bd
__tex_texture_map_39265ef9
__tex_texture_map_58f9c7e1
__tex_texture_map_5bd36001
__tex_texture_map_64344fd2
__tex_texture_map_6535d7ee
__tex_texture_map_657f1762
__tex_texture_map_6a0d408c
__tex_texture_map_7936f626
__tex_texture_map_c8d18519
__tex_texture_map_d367c534
__tex_texture_map_db014a68
__tex_texture_map_e814f87a
__tex_texture_map_e835d457
__tex_texture_map_f11a51e5
__tex_texture_map_f5d1f42a
__tex_texture_pack
__tex_texture_rgba_map
__tex_thumbnail_image_to_copy
__tex_tiler_array
__tex_trim_decal
__tex_trim_sheet_height_c982efd3
__tex_triplanar_detail_albedo
__tex_triplanar_detail_data
__tex_ui_3d_shadows
__tex_ui_diffuse_cubemap
__tex_ui_specular_cubemap
__tex_ui_texture
__tex_ui_video_texture_b
__tex_ui_video_texture_r
__tex_ui_video_texture_y
__tex_uv_noise_tex
__tex_vegetation_bending
__tex_veins_data
__tex_vertex_normal_map
__tex_vista_cloud_atlas
__tex_vista_cloud_subsurface_atlas
__tex_vista_detail_albedo
__tex_vista_detail_nar
__tex_volumetric_cloud_current_weather_map
__tex_volumetric_cloud_detail_noise_combined
__tex_volumetric_cloud_noise_combined
__tex_volumetric_cloud_weather_map0
__tex_volumetric_cloud_weather_map1
__tex_volumetric_clouds_color
__tex_volumetric_clouds_color_probe
__tex_volumetric_clouds_depth
__tex_volumetric_clouds_depth_probe
__tex_volumetric_clouds_prev
__tex_volumetric_clouds_shadows_final
__tex_volumetric_clouds_weather_current
__tex_volumetric_current_high_altitude_clouds
__tex_volumetric_current_high_altitude_weather_map
__tex_volumetric_fog_3d_image
__tex_volumetric_fog_3d_image_history
__tex_volumetric_high_altitude_clouds0
__tex_volumetric_high_altitude_clouds1
__tex_volumetric_high_altitude_weather_map0
__tex_volumetric_high_altitude_weather_map1
__tex_water_caustics
__tex_water_height
__tex_water_patch
__tex_water_rt
__tex_water_target
__tex_wear
__tex_wear_mask
__tex_wear_mra
__tex_weathering_albedo
__tex_weathering_data_mask
__tex_weathering_dirt
__tex_weathering_nar
__tex_weathering_special
__tex_wind_noise
__tex_wind_vector_field
__tex_world_overlay_texture
__tex_worldmask
__tex_wound_data
__tex_wound_derivative
__tex_wound_lut_to_add
__tex_wound_normal
__tex_wounds_256
__tex_wounds_512

 

