Merged
219 changes: 132 additions & 87 deletions src/main/java/in/dragonbra/javasteam/steam/cdn/DepotChunk.kt
@@ -1,87 +1,132 @@
package `in`.dragonbra.javasteam.steam.cdn

import `in`.dragonbra.javasteam.types.ChunkData
import `in`.dragonbra.javasteam.util.Strings
import `in`.dragonbra.javasteam.util.Utils
import `in`.dragonbra.javasteam.util.VZipUtil
import `in`.dragonbra.javasteam.util.ZipUtil
import `in`.dragonbra.javasteam.util.crypto.CryptoHelper
import `in`.dragonbra.javasteam.util.stream.MemoryStream
import java.io.IOException
import javax.crypto.Cipher
import javax.crypto.spec.IvParameterSpec
import javax.crypto.spec.SecretKeySpec

/**
* Provides a helper function to decrypt and decompress a single depot chunk.
*/
object DepotChunk {

/**
* Processes the specified depot key by decrypting the data with the given depot encryption key, and then by decompressing the data.
* If the chunk has already been processed, this function does nothing.
* @param info The depot chunk data representing.
* @param data The encrypted chunk data.
* @param destination The buffer to receive the decrypted chunk data.
* @param depotKey The depot decryption key.
* @exception IOException Thrown if the processed data does not match the expected checksum given in its chunk information.
* @exception IllegalArgumentException Thrown if the destination size is too small or the depot key is not 32 bytes long
*/
fun process(
info: ChunkData,
data: ByteArray,
destination: ByteArray,
depotKey: ByteArray,
): Int {
require(destination.size >= info.uncompressedLength) {
"The destination buffer must be longer than the chunk ${ChunkData::uncompressedLength.name}."
}

require(depotKey.size == 32) { "Tried to decrypt depot chunk with non 32 byte key!" }

// first 16 bytes of input is the ECB encrypted IV
val keySpec = SecretKeySpec(depotKey, "AES")
val ecbCipher = Cipher.getInstance("AES/ECB/NoPadding", CryptoHelper.SEC_PROV)
ecbCipher.init(Cipher.DECRYPT_MODE, keySpec)

val iv = ByteArray(16)
val ivBytesRead = ecbCipher.doFinal(data, 0, iv.size, iv)

require(iv.size == ivBytesRead) { "Failed to decrypt depot chunk iv (${iv.size} != $ivBytesRead)" }

// With CBC and padding, the decrypted size will always be smaller
val buffer = ByteArray(data.size - iv.size)
val cbcCipher = Cipher.getInstance("AES/CBC/PKCS7Padding", CryptoHelper.SEC_PROV)
cbcCipher.init(Cipher.DECRYPT_MODE, keySpec, IvParameterSpec(iv))

val writtenDecompressed: Int

try {
val bytesWrittenToBuffer = cbcCipher.doFinal(data, iv.size, data.size - iv.size, buffer)

writtenDecompressed = if (buffer.size > 1 && buffer[0] == 'V'.code.toByte() && buffer[1] == 'Z'.code.toByte()) {
MemoryStream(buffer, 0, bytesWrittenToBuffer).use { ms ->
VZipUtil.decompress(ms, destination, verifyChecksum = false)
}
} else {
MemoryStream(buffer, 0, bytesWrittenToBuffer).use { ms ->
ZipUtil.decompress(ms, destination, verifyChecksum = false)
}
}
} catch (e: Exception) {
throw IOException("Failed to decompress chunk ${Strings.toHex(info.chunkID)}: $e\n${e.stackTraceToString()}")
}

if (info.uncompressedLength != writtenDecompressed) {
throw IOException("Processed data checksum failed to decompress to the expected chunk uncompressed length. (was $writtenDecompressed, should be ${info.uncompressedLength})")
}

val dataCrc = Utils.adlerHash(destination.sliceArray(0 until writtenDecompressed))

if (dataCrc != info.checksum) {
throw IOException("Processed data checksum is incorrect ($dataCrc != ${info.checksum})! Downloaded depot chunk is corrupt or invalid/wrong depot key?")
}

return writtenDecompressed
}
}
package `in`.dragonbra.javasteam.steam.cdn

import `in`.dragonbra.javasteam.types.ChunkData
import `in`.dragonbra.javasteam.util.Strings
import `in`.dragonbra.javasteam.util.Utils
import `in`.dragonbra.javasteam.util.VZipUtil
import `in`.dragonbra.javasteam.util.ZipUtil
import `in`.dragonbra.javasteam.util.crypto.CryptoHelper
import `in`.dragonbra.javasteam.util.stream.MemoryStream
import java.io.IOException
import javax.crypto.Cipher
import javax.crypto.spec.IvParameterSpec
import javax.crypto.spec.SecretKeySpec

/**
* Provides a helper function to decrypt and decompress a single depot chunk.
*/
object DepotChunk {

/**
* Processes the specified depot chunk by decrypting the data with the given depot encryption key, and then decompressing it.
* @param info The depot chunk data representing the chunk to process.
* @param data The encrypted chunk data.
* @param destination The buffer to receive the decrypted chunk data.
* @param depotKey The depot decryption key.
* @exception IOException Thrown if the processed data does not match the expected checksum given in its chunk information.
* @exception IllegalArgumentException Thrown if the destination size is too small or the depot key is not 32 bytes long.
*/
@JvmStatic
fun process(
info: ChunkData,
data: ByteArray,
destination: ByteArray,
depotKey: ByteArray,
): Int {
require(destination.size >= info.uncompressedLength) {
"The destination buffer must be longer than the chunk ${ChunkData::uncompressedLength.name}."
}

require(depotKey.size == 32) { "Tried to decrypt depot chunk with non 32 byte key!" }

// first 16 bytes of input is the ECB encrypted IV
val keySpec = SecretKeySpec(depotKey, "AES")
val ecbCipher = Cipher.getInstance("AES/ECB/NoPadding", CryptoHelper.SEC_PROV)
ecbCipher.init(Cipher.DECRYPT_MODE, keySpec)

val iv = ByteArray(16)
val ivBytesRead = ecbCipher.doFinal(data, 0, iv.size, iv)

require(iv.size == ivBytesRead) { "Failed to decrypt depot chunk iv (${iv.size} != $ivBytesRead)" }

// With CBC and padding, the decrypted size will always be smaller
val buffer = ByteArray(data.size - iv.size)
val cbcCipher = Cipher.getInstance("AES/CBC/PKCS7Padding", CryptoHelper.SEC_PROV)
cbcCipher.init(Cipher.DECRYPT_MODE, keySpec, IvParameterSpec(iv))

val writtenDecompressed: Int

try {
val written = cbcCipher.doFinal(data, iv.size, data.size - iv.size, buffer)

// Per SteamKit: the Steam client checks for roughly 20 bytes for PKZip and 22 bytes for VZip;
// a smaller minimum is checked here to stay on the safe side.
if (buffer.size < 16) {
throw IOException("Not enough data in the decrypted depot chunk (was ${buffer.size} bytes).")
}

if (buffer[0] == 'V'.code.toByte() &&
buffer[1] == 'S'.code.toByte() &&
buffer[2] == 'Z'.code.toByte() &&
buffer[3] == 'a'.code.toByte()
) {
// Zstd
throw RuntimeException("Zstd compressed chunks are not yet implemented in JavaSteam.")
} else if (buffer[0] == 'V'.code.toByte() &&
buffer[1] == 'Z'.code.toByte() &&
buffer[2] == 'a'.code.toByte()
) {
// LZMA
MemoryStream(buffer, 0, written).use { decryptedStream ->
writtenDecompressed = VZipUtil.decompress(
ms = decryptedStream,
destination = destination,
verifyChecksum = false
)
}
} else if (buffer[0] == 'P'.code.toByte() &&
buffer[1] == 'K'.code.toByte() &&
buffer[2].toInt() == 0x03 &&
buffer[3].toInt() == 0x04
) {
// Per SteamKit: the Steam client performs the same magic byte check.

// PKzip
MemoryStream(buffer, 0, written).use { decryptedStream ->
writtenDecompressed = ZipUtil.decompress(
ms = decryptedStream,
destination = destination,
verifyChecksum = false
)
}
} else {
throw IOException(
"Unexpected depot chunk compression " +
"(first four bytes are ${Strings.toHex(buffer.copyOfRange(0, 4))})."
)
}
} catch (e: Exception) {
throw IOException("Failed to decompress chunk ${Strings.toHex(info.chunkID)}: $e\n${e.stackTraceToString()}")
} finally {
buffer.fill(0)
}

if (info.uncompressedLength != writtenDecompressed) {
throw IOException(
"Processed data checksum failed to decompress to the expected chunk uncompressed length. " +
"(was $writtenDecompressed, should be ${info.uncompressedLength})"
)
}

val dataCrc = Utils.adlerHash(destination.sliceArray(0 until writtenDecompressed))

if (dataCrc != info.checksum) {
throw IOException("Processed data checksum is incorrect ($dataCrc != ${info.checksum})! Downloaded depot chunk is corrupt or invalid/wrong depot key?")
}

return writtenDecompressed
}
}
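For reference, a minimal Kotlin usage sketch of the reworked process entry point, mirroring what the new DepotChunkTest below exercises; the function name decodeChunk and the chunkInfo/downloadedBytes/depotKey values are illustrative placeholders, not part of this change.

import `in`.dragonbra.javasteam.steam.cdn.DepotChunk
import `in`.dragonbra.javasteam.types.ChunkData

// Sketch only: chunkInfo, downloadedBytes and depotKey are placeholders for data
// obtained elsewhere (manifest metadata, a CDN download, and the 32-byte depot key).
fun decodeChunk(chunkInfo: ChunkData, downloadedBytes: ByteArray, depotKey: ByteArray): ByteArray {
    // The destination must hold at least uncompressedLength bytes, as enforced by process().
    val destination = ByteArray(chunkInfo.uncompressedLength)

    // Decrypts the chunk (AES-ECB wrapped IV + AES-CBC body) and decompresses it
    // (VZip/LZMA or PKZip selected by magic bytes; Zstd is detected but not yet supported),
    // then verifies the decompressed length and Adler checksum.
    val written = DepotChunk.process(chunkInfo, downloadedBytes, destination, depotKey)

    // On success, written equals chunkInfo.uncompressedLength; failures throw IOException.
    return destination.copyOf(written)
}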
8 changes: 5 additions & 3 deletions src/main/java/in/dragonbra/javasteam/util/VZipUtil.kt
@@ -21,15 +21,16 @@ object VZipUtil {
private const val HEADER_LENGTH = 7 // magic + version + timestamp/crc
private const val FOOTER_LENGTH = 10 // crc + decompressed size + magic

private const val VERSION = 'a'
private const val VERSION: Byte = 'a'.code.toByte()

@JvmStatic
fun decompress(ms: MemoryStream, destination: ByteArray, verifyChecksum: Boolean = true): Int {
BinaryReader(ms).use { reader ->
if (reader.readShort() != VZIP_HEADER) {
throw IllegalArgumentException("Expecting VZipHeader at start of stream")
}

if (reader.readChar() != VERSION) {
if (reader.readByte() != VERSION) {
throw IllegalArgumentException("Expecting VZip version 'a'")
}

@@ -84,12 +85,13 @@ object VZipUtil {
/**
* Ported from SteamKit2 and is untested, use at your own risk
*/
@JvmStatic
fun compress(buffer: ByteArray): ByteArray {
ByteArrayOutputStream().use { ms ->
BinaryWriter(ms).use { writer ->
val crc = CryptoHelper.crcHash(buffer)
writer.writeShort(VZIP_HEADER)
writer.writeChar(VERSION)
writer.writeByte(VERSION)
writer.write(crc)

// Configure LZMA options to match SteamKit2's settings
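As context for the Char-to-Byte change to VERSION above, a hedged sketch of the VZip signature check the 7-byte header implies ('VZ' magic followed by the version byte 'a'); the helper name and constant below are illustrative and are not part of VZipUtil's API.

// Illustrative helper, not part of VZipUtil: a quick VZip signature check implied by the
// header layout above (2-byte "VZ" magic + 1 version byte + 4-byte timestamp/crc = 7 bytes),
// using the same byte values DepotChunk relies on for format detection.
private const val VZIP_VERSION_BYTE: Byte = 'a'.code.toByte()

fun looksLikeVZip(buffer: ByteArray): Boolean {
    if (buffer.size < 7) return false // shorter than the VZip header
    return buffer[0] == 'V'.code.toByte() &&
        buffer[1] == 'Z'.code.toByte() &&
        buffer[2] == VZIP_VERSION_BYTE
}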
1 change: 1 addition & 0 deletions src/main/java/in/dragonbra/javasteam/util/ZipUtil.kt
@@ -6,6 +6,7 @@ import java.util.zip.ZipInputStream

object ZipUtil {

@JvmStatic
fun decompress(ms: MemoryStream, destination: ByteArray, verifyChecksum: Boolean = true): Int {
ZipInputStream(ms, Charsets.UTF_8).use { zip ->
val entry = zip.nextEntry
93 changes: 93 additions & 0 deletions src/test/java/in/dragonbra/javasteam/steam/cdn/DepotChunkTest.java
@@ -0,0 +1,93 @@
package in.dragonbra.javasteam.steam.cdn;

import in.dragonbra.javasteam.types.ChunkData;
import in.dragonbra.javasteam.util.stream.MemoryStream;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.apache.commons.codec.binary.Hex;

import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DepotChunkTest {

@Test
public void decryptsAndDecompressesDepotChunkPKZip() throws IOException, NoSuchAlgorithmException {
var stream = getClass().getClassLoader()
.getResourceAsStream("depot/depot_440_chunk_bac8e2657470b2eb70d6ddcd6c07004be8738697.bin");

var ms = new MemoryStream();
IOUtils.copy(stream, ms.asOutputStream());

var chunkData = ms.toByteArray();

var chunk = new ChunkData(
new byte[0], // id is not needed here
2130218374,
0,
320,
544
);

var destination = new byte[chunk.getUncompressedLength()];
var writtenLength = DepotChunk.process(
chunk,
chunkData,
destination,
new byte[]{
(byte) 0x44, (byte) 0xCE, (byte) 0x5C, (byte) 0x52, (byte) 0x97, (byte) 0xA4, (byte) 0x15, (byte) 0xA1,
(byte) 0xA6, (byte) 0xF6, (byte) 0x9C, (byte) 0x85, (byte) 0x60, (byte) 0x37, (byte) 0xA5, (byte) 0xA2,
(byte) 0xFD, (byte) 0xD8, (byte) 0x2C, (byte) 0xD4, (byte) 0x74, (byte) 0xFA, (byte) 0x65, (byte) 0x9E,
(byte) 0xDF, (byte) 0xB4, (byte) 0xD5, (byte) 0x9B, (byte) 0x2A, (byte) 0xBC, (byte) 0x55, (byte) 0xFC
}
);

Assertions.assertEquals(chunk.getCompressedLength(), chunkData.length);
Assertions.assertEquals(chunk.getUncompressedLength(), writtenLength);

var hash = Hex.encodeHexString(MessageDigest.getInstance("SHA-1").digest(destination), false);

Assertions.assertEquals("BAC8E2657470B2EB70D6DDCD6C07004BE8738697", hash);
}

@Test
public void decryptsAndDecompressesDepotChunkVZip() throws IOException, NoSuchAlgorithmException {
var stream = getClass().getClassLoader()
.getResourceAsStream("depot/depot_232250_chunk_7b8567d9b3c09295cdbf4978c32b348d8e76c750.bin");

var ms = new MemoryStream();
IOUtils.copy(stream, ms.asOutputStream());

var chunkData = ms.toByteArray();

var chunk = new ChunkData(
new byte[0], // id is not needed here
Integer.parseUnsignedInt("2894626744"),
0,
304,
798
);

var destination = new byte[chunk.getUncompressedLength()];
var writtenLength = DepotChunk.process(
chunk,
chunkData,
destination,
new byte[]{
(byte) 0xE5, (byte) 0xF6, (byte) 0xAE, (byte) 0xD5, (byte) 0x5E, (byte) 0x9E, (byte) 0xCE, (byte) 0x42,
(byte) 0x9E, (byte) 0x56, (byte) 0xB8, (byte) 0x13, (byte) 0xFB, (byte) 0xF6, (byte) 0xBF, (byte) 0xE9,
(byte) 0x24, (byte) 0xF3, (byte) 0xCF, (byte) 0x72, (byte) 0x97, (byte) 0x2F, (byte) 0xDB, (byte) 0xD0,
(byte) 0x57, (byte) 0x1F, (byte) 0xFC, (byte) 0xAD, (byte) 0x9F, (byte) 0x2F, (byte) 0x7D, (byte) 0xAA,
}
);

Assertions.assertEquals(chunk.getCompressedLength(), chunkData.length);
Assertions.assertEquals(chunk.getUncompressedLength(), writtenLength);

var hash = Hex.encodeHexString(MessageDigest.getInstance("SHA-1").digest(destination), false);

Assertions.assertEquals("7B8567D9B3C09295CDBF4978C32B348D8E76C750", hash);
}
}
Two new binary test resource files (the raw encrypted depot chunk data used by the tests above) are also added; their contents are not shown.