path: root/src/com/pkrandom/ctr
Diffstat (limited to 'src/com/pkrandom/ctr')
-rw-r--r--  src/com/pkrandom/ctr/AMX.java           227
-rw-r--r--  src/com/pkrandom/ctr/BFLIM.java         203
-rw-r--r--  src/com/pkrandom/ctr/GARCArchive.java   388
-rw-r--r--  src/com/pkrandom/ctr/Mini.java          102
-rw-r--r--  src/com/pkrandom/ctr/NCCH.java         1024
-rw-r--r--  src/com/pkrandom/ctr/RomfsFile.java     121
-rw-r--r--  src/com/pkrandom/ctr/SMDH.java          118
7 files changed, 2183 insertions, 0 deletions
diff --git a/src/com/pkrandom/ctr/AMX.java b/src/com/pkrandom/ctr/AMX.java
new file mode 100644
index 0000000..d99ba7f
--- /dev/null
+++ b/src/com/pkrandom/ctr/AMX.java
@@ -0,0 +1,227 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- AMX.java - class for handling AMX script archives --*/
+/*-- --*/
+/*-- Contains code based on "pk3DS", copyright (C) Kaphotics --*/
+/*-- Contains code based on "pkNX", copyright (C) Kaphotics --*/
+/*-- Contains code based on "poketools", copyright (C) FireyFly --*/
+/*-- Additional contributions by the UPR-ZX team --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+import com.pkrandom.exceptions.RandomizerIOException;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class AMX {
+
+ public byte[] decData;
+ public int scriptOffset = 0;
+
+ private int amxMagic = 0x0A0AF1E0;
+ private int amxMagicDebug = 0x0A0AF1EF;
+ private long mask = 0xFF;
+
+ private int length;
+
+ private int scriptInstrStart;
+ private int scriptMovementStart;
+ private int finalOffset;
+ private int allocatedMemory;
+
+ private int compLength;
+ private int decompLength;
+
+ private int ptrOffset;
+ private int ptrCount;
+
+ private byte[] extraData;
+
+ public AMX(byte[] data, int scriptNum) throws IOException {
+ int found = 0;
+ for (int i = 0; i < data.length - 3; i++) {
+ int val = FileFunctions.readFullInt(data,i);
+ if (val == amxMagic) {
+ if (found == scriptNum) {
+ int length = FileFunctions.readFullInt(data,i-4);
+ readHeaderAndDecompress(Arrays.copyOfRange(data,i-4,i-4+length));
+ scriptOffset = i-4;
+ break;
+ } else {
+ found++;
+ }
+ }
+ }
+ }
+
+ public AMX(byte[] encData) throws IOException {
+ readHeaderAndDecompress(encData);
+ }
+
+ // Credit to the creators of pk3DS (Kaphotics et al)
+ private void readHeaderAndDecompress(byte[] encData) throws IOException {
+ length = FileFunctions.readFullInt(encData,0);
+ int magic = FileFunctions.readFullInt(encData,4);
+ if (magic != amxMagic) {
+ throw new IOException();
+ }
+
+ ptrOffset = FileFunctions.read2ByteInt(encData,8);
+ ptrCount = FileFunctions.read2ByteInt(encData,0xA);
+
+ scriptInstrStart = FileFunctions.readFullInt(encData,0xC);
+ scriptMovementStart = FileFunctions.readFullInt(encData,0x10);
+ finalOffset = FileFunctions.readFullInt(encData,0x14);
+ allocatedMemory = FileFunctions.readFullInt(encData,0x18);
+
+ compLength = length - scriptInstrStart;
+ byte[] compressedBytes = Arrays.copyOfRange(encData,scriptInstrStart,length);
+ decompLength = finalOffset - scriptInstrStart;
+
+ decData = decompressBytes(compressedBytes, decompLength);
+ extraData = Arrays.copyOfRange(encData,0x1C,scriptInstrStart);
+ }
+
+ // Credit to FireyFly
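+ // The compressed stream stores each 32-bit value as a variable-length sequence: every
+ // byte contributes its low 7 bits, the high bit marks a continuation, and bit 6 of the
+ // first byte supplies the sign. Each completed value is emitted as 4 little-endian bytes.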
+ private byte[] decompressBytes(byte[] data, int length) {
+ byte[] code = new byte[length];
+ int i = 0, j = 0, x = 0, f = 0;
+ while (i < code.length) {
+ int b = data[f++];
+ int v = b & 0x7F;
+ if (++j == 1) {
+ x = ((((v >>> 6 == 0 ? 1 : 0) - 1 ) << 6) | v);
+ } else {
+ x = (x << 7) | (v & 0xFF);
+ }
+ if ((b & 0x80) != 0) continue;
+ code[i++] = (byte)(x & 0xFF);
+ code[i++] = (byte)((x >>> 8) & 0xFF);
+ code[i++] = (byte)((x >>> 16) & 0xFF);
+ code[i++] = (byte)((x >>> 24) & 0xFF);
+ j = 0;
+ }
+ return code;
+ }
+
+ public byte[] getBytes() {
+
+ ByteBuffer bbuf = ByteBuffer.allocate(length*2);
+
+ bbuf.order(ByteOrder.LITTLE_ENDIAN);
+
+ bbuf.putInt(length);
+ bbuf.putInt(amxMagic);
+ bbuf.putShort((short)ptrOffset);
+ bbuf.putShort((short)ptrCount);
+ bbuf.putInt(scriptInstrStart);
+ bbuf.putInt(scriptMovementStart);
+ bbuf.putInt(finalOffset);
+ bbuf.putInt(allocatedMemory);
+ bbuf.put(extraData);
+ bbuf.put(compressScript(decData));
+ bbuf.flip();
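+ // flip() rewinds the position to 0, so this overwrites the length field with the new total size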
+ bbuf.putInt(bbuf.limit());
+
+ return Arrays.copyOfRange(bbuf.array(),0,bbuf.limit());
+ }
+
+ private byte[] compressScript(byte[] data) {
+ if (data == null || data.length % 4 != 0) {
+ return null;
+ }
+ ByteBuffer inBuf = ByteBuffer.wrap(data);
+ inBuf.order(ByteOrder.LITTLE_ENDIAN);
+
+ ByteArrayOutputStream out = new ByteArrayOutputStream(compLength);
+
+ try {
+ while (inBuf.position() < data.length) {
+ compressBytes(inBuf, out);
+ }
+ } catch (IOException e) {
+ throw new RandomizerIOException(e);
+ }
+
+ return out.toByteArray();
+ }
+
+ // Modified version of the AMX script compression algorithm from pkNX
+ private void compressBytes(ByteBuffer inBuf, ByteArrayOutputStream out) throws IOException {
+ List<Byte> bytes = new ArrayList<>();
+ int instructionTemp = inBuf.getInt(inBuf.position());
+ long instruction = Integer.toUnsignedLong(instructionTemp);
+ boolean sign = (instruction & 0x80000000) > 0;
+
+ // Signed (negative) values are handled opposite of unsigned (positive) values.
+ // Positive values are "done" when we've shifted the value down to zero, but
+ // we don't need to store the highest 1s in a signed value. We handle this by
+ // tracking the loop via a NOTed shadow copy of the instruction if it's signed.
+ int shadowTemp = sign ? ~instructionTemp : instructionTemp;
+ long shadow = Integer.toUnsignedLong(shadowTemp);
+ do
+ {
+ long least7 = instruction & 0b01111111;
+ byte byteVal = (byte)least7;
+
+ if (bytes.size() > 0)
+ {
+ // Continuation bit on all but the lowest byte
+ byteVal |= 0x80;
+ }
+
+ bytes.add(byteVal);
+
+ instruction >>= 7;
+ shadow >>= 7;
+ }
+ while (shadow != 0);
+
+ if (bytes.size() < 5)
+ {
+ // Ensure "sign bit" (bit just to the right of highest continuation bit) is
+ // correct. Add an extra empty continuation byte if we need to. Values can't
+ // be longer than 5 bytes, though.
+
+ int signBit = sign ? 0x40 : 0x00;
+
+ if ((bytes.get(bytes.size() - 1) & 0x40) != signBit)
+ bytes.add((byte)(sign ? 0xFF : 0x80));
+ }
+
+ // Reverse so the most significant byte group comes first
+ for (int i = 0; i < bytes.size() / 2; i++) {
+ byte temp = bytes.get(i);
+ bytes.set(i, bytes.get(bytes.size() - i - 1));
+ bytes.set(bytes.size() - i - 1, temp);
+ }
+
+ byte[] ret = new byte[bytes.size()];
+ for (int i = 0; i < ret.length; i++) {
+ ret[i] = bytes.get(i);
+ }
+
+ inBuf.position(inBuf.position() + 4);
+ out.write(ret);
+ }
+}
diff --git a/src/com/pkrandom/ctr/BFLIM.java b/src/com/pkrandom/ctr/BFLIM.java
new file mode 100644
index 0000000..5bbb71a
--- /dev/null
+++ b/src/com/pkrandom/ctr/BFLIM.java
@@ -0,0 +1,203 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- BFLIM.java - class for reading/parsing BFLIM images. --*/
+/*-- Note that this class is optimized around handling Gen 7 --*/
+/*-- Pokemon icons, and won't work for all types of BFLIMs --*/
+/*-- --*/
+/*-- Code based on "Switch Toolbox", copyright (C) KillzXGaming --*/
+/*-- --*/
+/*-- Ported to Java by UPR-ZX Team under the terms of the GPL: --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+
+import java.awt.image.BufferedImage;
+
+public class BFLIM {
+
+ private int width;
+ private int height;
+ private byte[] imageData;
+ private Header header;
+ private Image image;
+
+ public BFLIM(byte[] bflimBytes) {
+ if (bflimBytes.length < 0x28) {
+ throw new IllegalArgumentException("Invalid BFLIM: not long enough to contain a header");
+ }
+ header = new Header(bflimBytes);
+ image = new Image(bflimBytes);
+ width = image.width;
+ height = image.height;
+ imageData = new byte[image.imageSize];
+ System.arraycopy(bflimBytes, 0, imageData, 0, image.imageSize);
+ }
+
+ @SuppressWarnings("SuspiciousNameCombination")
+ public BufferedImage getImage() {
+ // Swap width and height, because the image is rendered on its side
+ int swappedWidth = height;
+ int swappedHeight = width;
+ int[] decodedImageData = decodeBlock(imageData, swappedWidth, swappedHeight);
+ int[] colorData = convertToColorData(decodedImageData);
+ int[] correctedColorData = rearrangeImage(colorData, width, height);
+ BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ int color = correctedColorData[x + (y * this.width)];
+ image.setRGB(x, y, color);
+ }
+ }
+ return image;
+ }
+
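+ // Maps the n-th texel of a Morton-ordered (Z-order) 8x8 tile to its linear position
+ // within the tile; used to de-swizzle the tiled 3DS texture layout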
+ private static int[] SwizzleLUT = {
+ 0, 1, 8, 9, 2, 3, 10, 11,
+ 16, 17, 24, 25, 18, 19, 26, 27,
+ 4, 5, 12, 13, 6, 7, 14, 15,
+ 20, 21, 28, 29, 22, 23, 30, 31,
+ 32, 33, 40, 41, 34, 35, 42, 43,
+ 48, 49, 56, 57, 50, 51, 58, 59,
+ 36, 37, 44, 45, 38, 39, 46, 47,
+ 52, 53, 60, 61, 54, 55, 62, 63
+ };
+
+ private int[] decodeBlock(byte[] data, int width, int height) {
+ int[] output = new int[width * height * 4];
+ int inputOffset = 0;
+ for (int ty = 0; ty < height; ty += 8) {
+ for (int tx = 0; tx < width; tx += 8) {
+ for (int px = 0; px < 64; px++) {
+ int x = SwizzleLUT[px] & 7;
+ int y = (SwizzleLUT[px] - x) >> 3;
+ int outputOffset = (tx + x + ((height - 1 - (ty + y)) * width)) * 4;
+ int value = FileFunctions.read2ByteInt(data, inputOffset);
+ if (image.format == 7) {
+ decodeRGBA5551(output, outputOffset, value);
+ } else if (image.format == 8) {
+ decodeRGBA4(output, outputOffset, value);
+ } else {
+ throw new IllegalArgumentException("Unsupported BFLIM: unsupported image format");
+ }
+ inputOffset += 2;
+ }
+ }
+ }
+ return output;
+ }
+
+ private int[] convertToColorData(int[] decodedImageData) {
+ int[] output = new int[decodedImageData.length / 4];
+ for (int i = 0; i < decodedImageData.length; i += 4) {
+ int a = decodedImageData[i];
+ int b = decodedImageData[i + 1];
+ int g = decodedImageData[i + 2];
+ int r = decodedImageData[i + 3];
+ int color = (a << 24) | (b << 16) | (g << 8) | r;
+ output[i / 4] = color;
+ }
+ return output;
+ }
+
+ private int[] rearrangeImage(int[] colorData, int width, int height) {
+ int[] output = new int[colorData.length];
+ for (int destY = 0; destY < height; destY++) {
+ for (int destX = 0; destX < width; destX++) {
+ int srcX = height - destY - 1;
+ int srcY = width - destX - 1;
+ int srcIndex = srcX + (srcY * height);
+ int destIndex = destX + (destY * width);
+ output[destIndex] = colorData[srcIndex];
+ }
+ }
+ return output;
+ }
+
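+ // RGBA5551: bit 0 is the alpha flag, bits 1-5 red, 6-10 green, 11-15 blue;
+ // each 5-bit channel is expanded to 8 bits by replicating its high bits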
+ private static void decodeRGBA5551(int[] output, int outputOffset, int value) {
+ int R = ((value >> 1) & 0x1f) << 3;
+ int G = ((value >> 6) & 0x1f) << 3;
+ int B = ((value >> 11) & 0x1f) << 3;
+ int A = (value & 1) * 0xFF;
+ R = R | (R >> 5);
+ G = G | (G >> 5);
+ B = B | (B >> 5);
+ output[outputOffset] = A;
+ output[outputOffset + 1] = B;
+ output[outputOffset + 2] = G;
+ output[outputOffset + 3] = R;
+ }
+
+ private static void decodeRGBA4(int[] output, int outputOffset, int value) {
+ int R = ((value >> 4) & 0xf);
+ int G = ((value >> 8) & 0xf);
+ int B = ((value >> 12) & 0xf);
+ int A = (value & 0xf) | ((value & 0xf) << 4);
+ R = R | (R << 4);
+ G = G | (G << 4);
+ B = B | (B << 4);
+ output[outputOffset] = A;
+ output[outputOffset + 1] = B;
+ output[outputOffset + 2] = G;
+ output[outputOffset + 3] = R;
+ }
+
+ private class Header {
+ public int version;
+
+ public Header(byte[] bflimBytes) {
+ int headerOffset = bflimBytes.length - 0x28;
+ int signature = FileFunctions.readFullIntBigEndian(bflimBytes, headerOffset);
+ if (signature != 0x464C494D) {
+ throw new IllegalArgumentException("Invalid BFLIM: cannot find FLIM header");
+ }
+ boolean bigEndian = FileFunctions.read2ByteInt(bflimBytes, headerOffset + 4) == 0xFFFE;
+ if (bigEndian) {
+ throw new IllegalArgumentException("Unsupported BFLIM: this is a big endian BFLIM");
+ }
+ int headerSize = FileFunctions.read2ByteInt(bflimBytes, headerOffset + 6);
+ if (headerSize != 0x14) {
+ throw new IllegalArgumentException("Invalid BFLIM: header length does not equal 0x14");
+ }
+ version = FileFunctions.readFullInt(bflimBytes, headerOffset + 8);
+ }
+ }
+
+ private class Image {
+ public int size;
+ public short width;
+ public short height;
+ public short alignment;
+ public byte format;
+ public byte flags;
+ public int imageSize;
+
+ public Image(byte[] bflimBytes) {
+ int imageHeaderOffset = bflimBytes.length - 0x14;
+ int signature = FileFunctions.readFullIntBigEndian(bflimBytes, imageHeaderOffset);
+ if (signature != 0x696D6167) {
+ throw new IllegalArgumentException("Invalid BFLIM: cannot find imag header");
+ }
+ size = FileFunctions.readFullInt(bflimBytes, imageHeaderOffset + 4);
+ width = (short) FileFunctions.read2ByteInt(bflimBytes, imageHeaderOffset + 8);
+ height = (short) FileFunctions.read2ByteInt(bflimBytes, imageHeaderOffset + 10);
+ alignment = (short) FileFunctions.read2ByteInt(bflimBytes, imageHeaderOffset + 12);
+ format = bflimBytes[imageHeaderOffset + 14];
+ flags = bflimBytes[imageHeaderOffset + 15];
+ imageSize = FileFunctions.readFullInt(bflimBytes, imageHeaderOffset + 16);
+ }
+ }
+}
diff --git a/src/com/pkrandom/ctr/GARCArchive.java b/src/com/pkrandom/ctr/GARCArchive.java
new file mode 100644
index 0000000..cd504c4
--- /dev/null
+++ b/src/com/pkrandom/ctr/GARCArchive.java
@@ -0,0 +1,388 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- GARCArchive.java - class for packing/unpacking GARC archives --*/
+/*-- --*/
+/*-- Code based on "pk3DS", copyright (C) Kaphotics --*/
+/*-- --*/
+/*-- Ported to Java by UPR-ZX Team under the terms of the GPL: --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import cuecompressors.BLZCoder;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.*;
+
+public class GARCArchive {
+
+ private final int VER_4 = 0x0400;
+ private final int VER_6 = 0x0600;
+ private int version;
+ private final int garcHeaderSize_4 = 0x1C;
+ private final int garcHeaderSize_6 = 0x24;
+ private final String garcMagic = "CRAG";
+ private final String fatoMagic = "OTAF";
+ private final String fatbMagic = "BTAF";
+ private final String fimbMagic = "BMIF";
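+ // Section magics as they appear on disk: the ASCII names GARC/FATO/FATB/FIMB byte-reversed (stored little-endian)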
+ private boolean skipDecompression = true;
+
+ public List<Map<Integer,byte[]>> files = new ArrayList<>();
+ private Map<Integer,Boolean> isCompressed = new TreeMap<>();
+ private List<Boolean> compressThese = null;
+
+ private GARCFrame garc;
+ private FATOFrame fato;
+ private FATBFrame fatb;
+ private FIMBFrame fimb;
+
+ public GARCArchive() {
+
+ }
+
+ public GARCArchive(byte[] data, boolean skipDecompression) throws IOException {
+ this.skipDecompression = skipDecompression;
+ boolean success = readFrames(data);
+ if (!success) {
+ throw new IOException("Invalid GARC file");
+ }
+ files = fimb.files;
+ }
+
+ public GARCArchive(byte[] data, List<Boolean> compressedThese) throws IOException {
+ this.compressThese = compressedThese;
+ boolean success = readFrames(data);
+ if (!success) {
+ throw new IOException("Invalid GARC file");
+ }
+ files = fimb.files;
+ }
+
+ private boolean readFrames(byte[] data) {
+ if (data.length <= 0) {
+ System.out.println("Empty GARC");
+ return false;
+ }
+ ByteBuffer bbuf = ByteBuffer.wrap(data);
+ bbuf.order(ByteOrder.LITTLE_ENDIAN);
+ // GARC
+ byte[] magicBuf = new byte[4];
+ bbuf.get(magicBuf);
+ String magic = new String(magicBuf);
+ if (!magic.equals(garcMagic)) {
+ return false;
+ }
+ garc = new GARCFrame();
+ garc.headerSize = bbuf.getInt();
+ garc.endianness = bbuf.getShort();
+ garc.version = bbuf.getShort();
+ int frameCount = bbuf.getInt();
+ if (frameCount != 4) {
+ return false;
+ }
+ garc.dataOffset = bbuf.getInt();
+ garc.fileSize = bbuf.getInt();
+ if (garc.version == VER_4) {
+ garc.contentLargestUnpadded = bbuf.getInt();
+ garc.contentPadToNearest = 4;
+ version = 4;
+ } else if (garc.version == VER_6) {
+ garc.contentLargestPadded = bbuf.getInt();
+ garc.contentLargestUnpadded = bbuf.getInt();
+ garc.contentPadToNearest = bbuf.getInt();
+ version = 6;
+ } else {
+ return false;
+ }
+
+ // FATO
+ fato = new FATOFrame();
+ bbuf.get(magicBuf);
+ magic = new String(magicBuf);
+ if (!magic.equals(fatoMagic)) {
+ return false;
+ }
+ fato.headerSize = bbuf.getInt();
+ fato.entryCount = bbuf.getShort();
+ fato.padding = bbuf.getShort();
+ fato.entries = new int[fato.entryCount];
+ for (int i = 0; i < fato.entryCount; i++) {
+ fato.entries[i] = bbuf.getInt();
+ }
+
+ // FATB
+ fatb = new FATBFrame();
+ bbuf.get(magicBuf);
+ magic = new String(magicBuf);
+ if (!magic.equals(fatbMagic)) {
+ return false;
+ }
+ fatb.headerSize = bbuf.getInt();
+ fatb.fileCount = bbuf.getInt();
+ fatb.entries = new FATBEntry[fatb.fileCount];
+ for (int i = 0; i < fatb.fileCount; i++) {
+ fatb.entries[i] = new FATBEntry();
+ fatb.entries[i].vector = bbuf.getInt();
+ fatb.entries[i].subEntries = new TreeMap<>();
+ int bitVector = fatb.entries[i].vector;
+ int counter = 0;
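+ // Each set bit in the vector marks an existing sub-file slot; its start/end/length triple follows in ascending bit order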
+ for (int b = 0; b < 32; b++) {
+ boolean exists = (bitVector & 1) == 1;
+ bitVector >>>= 1;
+ if (!exists) continue;
+ FATBSubEntry subEntry = new FATBSubEntry();
+ subEntry.start = bbuf.getInt();
+ subEntry.end = bbuf.getInt();
+ subEntry.length = bbuf.getInt();
+ fatb.entries[i].subEntries.put(b,subEntry);
+ counter++;
+ }
+ fatb.entries[i].isFolder = counter > 1;
+ }
+
+ // FIMB
+ fimb = new FIMBFrame();
+ bbuf.get(magicBuf);
+ magic = new String(magicBuf);
+ if (!magic.equals(fimbMagic)) {
+ return false;
+ }
+ fimb.headerSize = bbuf.getInt();
+ fimb.dataSize = bbuf.getInt();
+ fimb.files = new ArrayList<>();
+ for (int i = 0; i < fatb.fileCount; i++) {
+ FATBEntry entry = fatb.entries[i];
+ Map<Integer,byte[]> files = new TreeMap<>();
+ for (int k: entry.subEntries.keySet()) {
+ FATBSubEntry subEntry = entry.subEntries.get(k);
+ bbuf.position(garc.dataOffset + subEntry.start);
+ byte[] file = new byte[subEntry.length];
+ boolean compressed = compressThese == null ?
+ bbuf.get(bbuf.position()) == 0x11 && !skipDecompression :
+ bbuf.get(bbuf.position()) == 0x11 && compressThese.get(i);
+ bbuf.get(file);
+ if (compressed) {
+ try {
+ files.put(k,new BLZCoder(null).BLZ_DecodePub(file,"GARC"));
+ isCompressed.put(i,true);
+ } catch (Exception e) {
+ e.printStackTrace();
+ return false;
+ }
+ } else {
+ files.put(k,file);
+ isCompressed.put(i,false);
+ }
+ }
+ fimb.files.add(files);
+ }
+ return true;
+ }
+
+ public void updateFiles(List<Map<Integer,byte[]>> files) {
+ fimb.files = files;
+ }
+
+ public byte[] getBytes() throws IOException {
+ int garcHeaderSize = garc.version == VER_4 ? garcHeaderSize_4 : garcHeaderSize_6;
+ ByteBuffer garcBuf = ByteBuffer.allocate(garcHeaderSize);
+ garcBuf.order(ByteOrder.LITTLE_ENDIAN);
+ garcBuf.put(garcMagic.getBytes());
+ garcBuf.putInt(garcHeaderSize);
+ garcBuf.putShort((short)0xFEFF);
+ garcBuf.putShort(version == 4 ? (short)VER_4 : (short)VER_6);
+ garcBuf.putInt(4);
+
+ ByteBuffer fatoBuf = ByteBuffer.allocate(fato.headerSize);
+ fatoBuf.order(ByteOrder.LITTLE_ENDIAN);
+ fatoBuf.put(fatoMagic.getBytes());
+ fatoBuf.putInt(fato.headerSize);
+ fatoBuf.putShort((short)fato.entryCount);
+ fatoBuf.putShort((short)fato.padding);
+
+ ByteBuffer fatbBuf = ByteBuffer.allocate(fatb.headerSize);
+ fatbBuf.order(ByteOrder.LITTLE_ENDIAN);
+ fatbBuf.put(fatbMagic.getBytes());
+ fatbBuf.putInt(fatb.headerSize);
+ fatbBuf.putInt(fatb.fileCount);
+
+ ByteBuffer fimbHeaderBuf = ByteBuffer.allocate(fimb.headerSize);
+ fimbHeaderBuf.order(ByteOrder.LITTLE_ENDIAN);
+ fimbHeaderBuf.put(fimbMagic.getBytes());
+ fimbHeaderBuf.putInt(fimb.headerSize);
+
+ ByteArrayOutputStream fimbPayloadStream = new ByteArrayOutputStream(); // Unknown size, can't use ByteBuffer
+
+ int fimbOffset = 0;
+ int largestSize = 0;
+ int largestPadded = 0;
+ for (int i = 0; i < fimb.files.size(); i++) {
+ Map<Integer,byte[]> directory = fimb.files.get(i);
+ int bitVector = 0;
+ int totalLength = 0;
+ for (int k: directory.keySet()) {
+ bitVector |= (1 << k);
+ byte[] file = directory.get(k);
+ if (isCompressed.get(i)) {
+ file = new BLZCoder(null).BLZ_EncodePub(file,false,false,"GARC");
+ }
+ fimbPayloadStream.write(file);
+ totalLength += file.length;
+ }
+
+ int paddingRequired = totalLength % garc.contentPadToNearest;
+ if (paddingRequired != 0) {
+ paddingRequired = garc.contentPadToNearest - paddingRequired;
+ }
+
+ if (totalLength > largestSize) {
+ largestSize = totalLength;
+ }
+ if (totalLength + paddingRequired > largestPadded) {
+ largestPadded = totalLength + paddingRequired;
+ }
+
+ for (int j = 0; j < paddingRequired; j++) {
+ fimbPayloadStream.write(fato.padding & 0xFF);
+ }
+
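+ // FATO entry: offset of this file's FATB entry, relative to the start of the
+ // FATB entry area (just past the 12-byte FATB header)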
+ fatoBuf.putInt(fatbBuf.position() - 12);
+
+ fatbBuf.putInt(bitVector);
+ fatbBuf.putInt(fimbOffset);
+ fimbOffset = fimbPayloadStream.size();
+ fatbBuf.putInt(fimbOffset);
+ fatbBuf.putInt(totalLength);
+ }
+
+ int dataOffset = garcHeaderSize + fatoBuf.position() + fatbBuf.position() + fimb.headerSize;
+ garcBuf.putInt(dataOffset);
+ garcBuf.putInt(dataOffset + fimbOffset);
+ if (garc.version == VER_4) {
+ garcBuf.putInt(largestSize);
+ } else if (garc.version == VER_6) {
+ garcBuf.putInt(largestPadded);
+ garcBuf.putInt(largestSize);
+ garcBuf.putInt(garc.contentPadToNearest);
+ }
+ fimbHeaderBuf.putInt(fimbPayloadStream.size());
+
+ garcBuf.flip();
+ fatoBuf.flip();
+ fatbBuf.flip();
+ fimbHeaderBuf.flip();
+
+ byte[] fullArray = new byte[garcBuf.limit() + fatoBuf.limit() + fatbBuf.limit() + fimbHeaderBuf.limit() + fimbPayloadStream.size()];
+ System.arraycopy(garcBuf.array(),
+ 0,
+ fullArray,
+ 0,
+ garcBuf.limit());
+ System.arraycopy(fatoBuf.array(),
+ 0,
+ fullArray,
+ garcBuf.limit(),
+ fatoBuf.limit());
+ System.arraycopy(fatbBuf.array(),
+ 0,
+ fullArray,
+ garcBuf.limit()+fatoBuf.limit(),
+ fatbBuf.limit());
+ System.arraycopy(fimbHeaderBuf.array(),
+ 0,
+ fullArray,
+ garcBuf.limit()+fatoBuf.limit()+fatbBuf.limit(),
+ fimbHeaderBuf.limit());
+// garcBuf.get(fullArray);
+// fatoBuf.get(fullArray,garcBuf.limit(),fatoBuf.limit());
+// fatbBuf.get(fullArray,garcBuf.limit()+fatoBuf.limit(),fatbBuf.limit());
+// fimbHeaderBuf.get(fullArray,garcBuf.limit()+fatoBuf.limit()+fatbBuf.limit(),fimbHeaderBuf.limit());
+ System.arraycopy(fimbPayloadStream.toByteArray(),
+ 0,
+ fullArray,
+ garcBuf.limit()+fatoBuf.limit()+fatbBuf.limit()+fimbHeaderBuf.limit(),
+ fimbPayloadStream.size());
+ return fullArray;
+ }
+
+
+
+ public byte[] getFile(int index) {
+ return fimb.files.get(index).get(0);
+ }
+
+ public byte[] getFile(int index, int subIndex) {
+ return fimb.files.get(index).get(subIndex);
+ }
+
+ public void setFile(int index, byte[] data) {
+ fimb.files.get(index).put(0,data);
+ }
+
+ public Map<Integer,byte[]> getDirectory(int index) {
+ return fimb.files.get(index);
+ }
+
+ private class GARCFrame {
+ int headerSize;
+ int endianness;
+ int version;
+ int dataOffset;
+ int fileSize;
+
+ int contentLargestPadded;
+ int contentLargestUnpadded;
+ int contentPadToNearest;
+ }
+
+ private class FATOFrame {
+ int headerSize;
+ int entryCount;
+ int padding;
+
+ int[] entries;
+ }
+
+ private class FATBFrame {
+ int headerSize;
+ int fileCount;
+ FATBEntry[] entries;
+ }
+
+ private class FATBEntry {
+ int vector;
+ boolean isFolder;
+ Map<Integer,FATBSubEntry> subEntries;
+ }
+
+ private class FATBSubEntry {
+ boolean exists;
+ int start;
+ int end;
+ int length;
+ int padding;
+ }
+
+ private class FIMBFrame {
+ int headerSize;
+ int dataSize;
+ List<Map<Integer,byte[]>> files;
+ }
+}
diff --git a/src/com/pkrandom/ctr/Mini.java b/src/com/pkrandom/ctr/Mini.java
new file mode 100644
index 0000000..82daa08
--- /dev/null
+++ b/src/com/pkrandom/ctr/Mini.java
@@ -0,0 +1,102 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- Mini.java - class for packing/unpacking Mini archives --*/
+/*-- --*/
+/*-- Code based on "pk3DS", copyright (C) Kaphotics --*/
+/*-- --*/
+/*-- Ported to Java by UPR-ZX Team under the terms of the GPL: --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+public class Mini {
+ public static byte[] PackMini(byte[][] fileData, String identifier) throws IOException {
+ // Create new Binary with the relevant header bytes
+ byte[] data = new byte[4];
+ data[0] = (byte) identifier.charAt(0);
+ data[1] = (byte) identifier.charAt(1);
+ ByteBuffer buf = ByteBuffer.allocate(2);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ buf.putShort((short) fileData.length);
+ System.arraycopy(buf.array(), 0, data, 2, 2);
+
+ int count = fileData.length;
+ int dataOffset = 4 + 4 + (count * 4);
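+ // 4-byte header followed by (count + 1) 4-byte offsets: one per file plus the end-of-data cap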
+
+ // Start the data filling
+ ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+ ByteArrayOutputStream offsetMap = new ByteArrayOutputStream();
+ // For each file...
+ for (int i = 0; i < count; i++) {
+ int fileOffset = dataOut.size() + dataOffset;
+ buf = ByteBuffer.allocate(4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ buf.putInt(fileOffset);
+ offsetMap.write(buf.array());
+ dataOut.write(fileData[i]);
+
+ // Pad with zeroes until len % 4 == 0
+ while (dataOut.size() % 4 != 0) {
+ dataOut.write((byte) 0);
+ }
+ }
+ // Cap the file
+ buf = ByteBuffer.allocate(4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ buf.putInt(dataOut.size() + dataOffset);
+ offsetMap.write(buf.array());
+
+ ByteArrayOutputStream newPack = new ByteArrayOutputStream();
+ ByteArrayOutputStream header = new ByteArrayOutputStream();
+ header.write(data);
+ header.writeTo(newPack);
+ offsetMap.writeTo(newPack);
+ dataOut.writeTo(newPack);
+ return newPack.toByteArray();
+ }
+
+ public static byte[][] UnpackMini(byte[] fileData, String identifier) {
+ if (fileData == null || fileData.length < 4) {
+ return null;
+ }
+
+ if (identifier.charAt(0) != fileData[0] || identifier.charAt(1) != fileData[1]) {
+ return null;
+ }
+
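+ // Layout: 2-byte identifier, 2-byte file count, then (count + 1) 4-byte offsets;
+ // file i spans [offset[i], offset[i+1])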
+ int count = FileFunctions.read2ByteInt(fileData, 2);
+ int ctr = 4;
+ int start = FileFunctions.readFullInt(fileData, ctr);
+ ctr += 4;
+ byte[][] returnData = new byte[count][];
+ for (int i = 0; i < count; i++) {
+ int end = FileFunctions.readFullInt(fileData, ctr);
+ ctr += 4;
+ int len = end - start;
+ byte[] data = new byte[len];
+ System.arraycopy(fileData, start, data, 0, len);
+ returnData[i] = data;
+ start = end;
+ }
+ return returnData;
+ }
+}
diff --git a/src/com/pkrandom/ctr/NCCH.java b/src/com/pkrandom/ctr/NCCH.java
new file mode 100644
index 0000000..9a326a5
--- /dev/null
+++ b/src/com/pkrandom/ctr/NCCH.java
@@ -0,0 +1,1024 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- NCCH.java - a base class for dealing with 3DS NCCH ROM images. --*/
+/*-- --*/
+/*-- Part of "Universal Pokemon Randomizer ZX" by the UPR-ZX team --*/
+/*-- Pokemon and any associated names and the like are --*/
+/*-- trademark and (C) Nintendo 1996-2020. --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+import com.pkrandom.SysConstants;
+import com.pkrandom.exceptions.CannotWriteToLocationException;
+import com.pkrandom.exceptions.EncryptedROMException;
+import com.pkrandom.exceptions.RandomizerIOException;
+import cuecompressors.BLZCoder;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.*;
+import java.util.*;
+
+public class NCCH {
+ private String romFilename;
+ private RandomAccessFile baseRom;
+ private long ncchStartingOffset;
+ private String productCode;
+ private String titleId;
+ private int version;
+ private long exefsOffset, romfsOffset, fileDataOffset;
+ private ExefsFileHeader codeFileHeader;
+ private SMDH smdh;
+ private List<ExefsFileHeader> extraExefsFiles;
+ private List<FileMetadata> fileMetadataList;
+ private Map<String, RomfsFile> romfsFiles;
+ private boolean romOpen;
+ private String tmpFolder;
+ private boolean writingEnabled;
+ private boolean codeCompressed, codeOpen, codeChanged;
+ private byte[] codeRamstored;
+
+ // Public so the base game can read it from the game update NCCH
+ public long originalCodeCRC, originalRomfsHeaderCRC;
+
+ private static final int media_unit_size = 0x200;
+ private static final int header_and_exheader_size = 0xA00;
+ private static final int ncsd_magic = 0x4E435344;
+ private static final int cia_header_size = 0x2020;
+ private static final int ncch_magic = 0x4E434348;
+ private static final int ncch_and_ncsd_magic_offset = 0x100;
+ private static final int exefs_header_size = 0x200;
+ private static final int romfs_header_size = 0x5C;
+ private static final int romfs_magic_1 = 0x49564643;
+ private static final int romfs_magic_2 = 0x00000100;
+ private static final int level3_header_size = 0x28;
+ private static final int metadata_unused = 0xFFFFFFFF;
+
+ public NCCH(String filename, String productCode, String titleId) throws IOException {
+ this.romFilename = filename;
+ this.baseRom = new RandomAccessFile(filename, "r");
+ this.ncchStartingOffset = NCCH.getCXIOffsetInFile(filename);
+ this.productCode = productCode;
+ this.titleId = titleId;
+ this.romOpen = true;
+
+ if (this.ncchStartingOffset != -1) {
+ this.version = this.readVersionFromFile();
+ }
+
+ // TMP folder?
+ String rawFilename = new File(filename).getName();
+ String dataFolder = "tmp_" + rawFilename.substring(0, rawFilename.lastIndexOf('.'));
+ // remove nonsensical chars
+ dataFolder = dataFolder.replaceAll("[^A-Za-z0-9_]+", "");
+ File tmpFolder = new File(SysConstants.ROOT_PATH + dataFolder);
+ tmpFolder.mkdirs();
+ if (tmpFolder.canWrite()) {
+ writingEnabled = true;
+ this.tmpFolder = SysConstants.ROOT_PATH + dataFolder + File.separator;
+ tmpFolder.deleteOnExit();
+ } else {
+ writingEnabled = false;
+ }
+
+ // The below code handles things "wrong" with regards to encrypted ROMs. We just
+ // blindly treat the ROM as decrypted and try to parse all of its data, when we
+ // *should* be looking at the header of the ROM to determine if the ROM is encrypted.
+ // Unfortunately, many people have poorly-decrypted ROMs that do not properly set
+ // the bytes on the NCCH header, so we can't assume that the header is telling the
+ // truth. If we read the whole ROM without crashing, then it's probably decrypted.
+ try {
+ readFileSystem();
+ } catch (Exception ex) {
+ if (!this.isDecrypted()) {
+ throw new EncryptedROMException(ex);
+ } else {
+ throw ex;
+ }
+ }
+ }
+
+ public void reopenROM() throws IOException {
+ if (!this.romOpen) {
+ baseRom = new RandomAccessFile(this.romFilename, "r");
+ romOpen = true;
+ }
+ }
+
+ public void closeROM() throws IOException {
+ if (this.romOpen && baseRom != null) {
+ baseRom.close();
+ baseRom = null;
+ romOpen = false;
+ }
+ }
+
+ private void readFileSystem() throws IOException {
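+ // Offsets in the NCCH header (exefs at 0x1A0, romfs at 0x1B0) are stored in media units of 0x200 bytes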
+ exefsOffset = ncchStartingOffset + FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x1A0) * media_unit_size;
+ romfsOffset = ncchStartingOffset + FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x1B0) * media_unit_size;
+ baseRom.seek(ncchStartingOffset + 0x20D);
+ byte systemControlInfoFlags = baseRom.readByte();
+ codeCompressed = (systemControlInfoFlags & 0x01) != 0;
+ readExefs();
+ readRomfs();
+ }
+
+ private void readExefs() throws IOException {
+ System.out.println("NCCH: Reading exefs...");
+ byte[] exefsHeaderData = new byte[exefs_header_size];
+ baseRom.seek(exefsOffset);
+ baseRom.readFully(exefsHeaderData);
+
+ ExefsFileHeader[] fileHeaders = new ExefsFileHeader[10];
+ for (int i = 0; i < 10; i++) {
+ fileHeaders[i] = new ExefsFileHeader(exefsHeaderData, i * 0x10);
+ }
+
+ extraExefsFiles = new ArrayList<>();
+ for (ExefsFileHeader fileHeader : fileHeaders) {
+ if (fileHeader.isValid() && fileHeader.filename.equals(".code")) {
+ codeFileHeader = fileHeader;
+ } else if (fileHeader.isValid()) {
+ extraExefsFiles.add(fileHeader);
+ }
+
+ if (fileHeader.isValid() && fileHeader.filename.equals("icon")) {
+ byte[] smdhBytes = new byte[fileHeader.size];
+ baseRom.seek(exefsOffset + 0x200 + fileHeader.offset);
+ baseRom.readFully(smdhBytes);
+ smdh = new SMDH(smdhBytes);
+ }
+ }
+ System.out.println("NCCH: Done reading exefs");
+ }
+
+ private void readRomfs() throws IOException {
+ System.out.println("NCCH: Reading romfs...");
+ byte[] romfsHeaderData = new byte[romfs_header_size];
+ baseRom.seek(romfsOffset);
+ baseRom.readFully(romfsHeaderData);
+ originalRomfsHeaderCRC = FileFunctions.getCRC32(romfsHeaderData);
+ int magic1 = FileFunctions.readFullIntBigEndian(romfsHeaderData, 0x00);
+ int magic2 = FileFunctions.readFullIntBigEndian(romfsHeaderData, 0x04);
+ if (magic1 != romfs_magic_1 || magic2 != romfs_magic_2) {
+ System.err.println("NCCH: romfs does not contain magic values");
+ // Not a valid romfs
+ return;
+ }
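+ // The level 3 partition (the actual file/directory data) starts after the 0x60-byte
+ // IVFC header and master hash table, aligned to the level 3 hash block size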
+ int masterHashSize = FileFunctions.readFullInt(romfsHeaderData, 0x08);
+ int level3HashBlockSize = 1 << FileFunctions.readFullInt(romfsHeaderData, 0x4C);
+ long level3Offset = romfsOffset + alignLong(0x60 + masterHashSize, level3HashBlockSize);
+
+ byte[] level3HeaderData = new byte[level3_header_size];
+ baseRom.seek(level3Offset);
+ baseRom.readFully(level3HeaderData);
+ int headerLength = FileFunctions.readFullInt(level3HeaderData, 0x00);
+ if (headerLength != level3_header_size) {
+ // Not a valid romfs
+ System.err.println("NCCH: romfs does not have a proper level 3 header");
+ return;
+ }
+ int directoryMetadataOffset = FileFunctions.readFullInt(level3HeaderData, 0x0C);
+ int directoryMetadataLength = FileFunctions.readFullInt(level3HeaderData, 0x10);
+ int fileMetadataOffset = FileFunctions.readFullInt(level3HeaderData, 0x1c);
+ int fileMetadataLength = FileFunctions.readFullInt(level3HeaderData, 0x20);
+ int fileDataOffsetFromHeaderStart = FileFunctions.readFullInt(level3HeaderData, 0x24);
+ fileDataOffset = level3Offset + fileDataOffsetFromHeaderStart;
+
+ byte[] directoryMetadataBlock = new byte[directoryMetadataLength];
+ baseRom.seek(level3Offset + directoryMetadataOffset);
+ baseRom.readFully(directoryMetadataBlock);
+ byte[] fileMetadataBlock = new byte[fileMetadataLength];
+ baseRom.seek(level3Offset + fileMetadataOffset);
+ baseRom.readFully(fileMetadataBlock);
+ fileMetadataList = new ArrayList<>();
+ romfsFiles = new TreeMap<>();
+ visitDirectory(0, "", directoryMetadataBlock, fileMetadataBlock);
+ System.out.println("NCCH: Done reading romfs");
+ }
+
+ private void visitDirectory(int offset, String rootPath, byte[] directoryMetadataBlock, byte[] fileMetadataBlock) {
+ DirectoryMetadata metadata = new DirectoryMetadata(directoryMetadataBlock, offset);
+ String currentPath = rootPath;
+ if (!metadata.name.equals("")) {
+ currentPath = rootPath + metadata.name + "/";
+ }
+
+ if (metadata.firstFileOffset != metadata_unused) {
+ visitFile(metadata.firstFileOffset, currentPath, fileMetadataBlock);
+ }
+ if (metadata.firstChildDirectoryOffset != metadata_unused) {
+ visitDirectory(metadata.firstChildDirectoryOffset, currentPath, directoryMetadataBlock, fileMetadataBlock);
+ }
+ if (metadata.siblingDirectoryOffset != metadata_unused) {
+ visitDirectory(metadata.siblingDirectoryOffset, rootPath, directoryMetadataBlock, fileMetadataBlock);
+ }
+ }
+
+ private void visitFile(int offset, String rootPath, byte[] fileMetadataBlock) {
+ FileMetadata metadata = new FileMetadata(fileMetadataBlock, offset);
+ String currentPath = rootPath + metadata.name;
+ System.out.println("NCCH: Visiting file " + currentPath);
+ RomfsFile file = new RomfsFile(this);
+ file.offset = fileDataOffset + metadata.fileDataOffset;
+ file.size = (int) metadata.fileDataLength; // no Pokemon game has a file larger than unsigned int max
+ file.fullPath = currentPath;
+ metadata.file = file;
+ fileMetadataList.add(metadata);
+ romfsFiles.put(currentPath, file);
+ if (metadata.siblingFileOffset != metadata_unused) {
+ visitFile(metadata.siblingFileOffset, rootPath, fileMetadataBlock);
+ }
+ }
+
+ public void saveAsNCCH(String filename, String gameAcronym, long seed) throws IOException, NoSuchAlgorithmException {
+ this.reopenROM();
+
+ // Initialize new ROM
+ RandomAccessFile fNew = new RandomAccessFile(filename, "rw");
+
+ // Read the header and exheader and write it to the output ROM
+ byte[] header = new byte[header_and_exheader_size];
+ baseRom.seek(ncchStartingOffset);
+ baseRom.readFully(header);
+ fNew.write(header);
+
+ // Just in case they were set wrong in the original header, let's correctly set the
+ // bytes in the header to indicate the output ROM is decrypted
+ byte[] flags = new byte[8];
+ baseRom.seek(ncchStartingOffset + 0x188);
+ baseRom.readFully(flags);
+ flags[3] = 0;
+ flags[7] = 4;
+ fNew.seek(0x188);
+ fNew.write(flags);
+
+ // The logo is small enough (8KB) to just read the whole thing into memory. Write it to the new ROM directly
+ // after the header, then update the new ROM's logo offset
+ long logoOffset = ncchStartingOffset + FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x198) * media_unit_size;
+ long logoLength = FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x19C) * media_unit_size;
+ if (logoLength > 0) {
+ byte[] logo = new byte[(int) logoLength];
+ baseRom.seek(logoOffset);
+ baseRom.readFully(logo);
+ long newLogoOffset = header_and_exheader_size;
+ fNew.seek(newLogoOffset);
+ fNew.write(logo);
+ fNew.seek(0x198);
+ fNew.write((int) newLogoOffset / media_unit_size);
+ }
+
+ // The plain region is even smaller (1KB) so repeat the same process
+ long plainOffset = ncchStartingOffset + FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x190) * media_unit_size;
+ long plainLength = FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x194) * media_unit_size;
+ if (plainLength > 0) {
+ byte[] plain = new byte[(int) plainLength];
+ baseRom.seek(plainOffset);
+ baseRom.readFully(plain);
+ long newPlainOffset = header_and_exheader_size + logoLength;
+ fNew.seek(newPlainOffset);
+ fNew.write(plain);
+ fNew.seek(0x190);
+ fNew.write((int) newPlainOffset / media_unit_size);
+ }
+
+ // Update the SMDH so that Citra displays the seed in the title
+ smdh.setAllDescriptions(gameAcronym + " randomizer seed: " + seed);
+ smdh.setAllPublishers("Universal Pokemon Randomizer ZX");
+
+ // Now, reconstruct the exefs based on our new version of .code and our new SMDH
+ long newExefsOffset = header_and_exheader_size + logoLength + plainLength;
+ long newExefsLength = rebuildExefs(fNew, newExefsOffset);
+ fNew.seek(0x1A0);
+ fNew.write((int) newExefsOffset / media_unit_size);
+ fNew.seek(0x1A4);
+ fNew.write((int) newExefsLength / media_unit_size);
+
+ // Then, reconstruct the romfs
+ // TODO: Fix the yet-unsolved alignment issues in rebuildRomfs when you remove this align
+ long newRomfsOffset = alignLong(header_and_exheader_size + logoLength + plainLength + newExefsLength, 4096);
+ long newRomfsLength = rebuildRomfs(fNew, newRomfsOffset);
+ fNew.seek(0x1B0);
+ fNew.write((int) newRomfsOffset / media_unit_size);
+ fNew.seek(0x1B4);
+ fNew.write((int) newRomfsLength / media_unit_size);
+
+ // Lastly, reconstruct the superblock hashes
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ int exefsHashRegionSize = FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x1A8) * media_unit_size;
+ byte[] exefsDataToHash = new byte[exefsHashRegionSize];
+ fNew.seek(newExefsOffset);
+ fNew.readFully(exefsDataToHash);
+ byte[] exefsSuperblockHash = digest.digest(exefsDataToHash);
+ fNew.seek(0x1C0);
+ fNew.write(exefsSuperblockHash);
+ int romfsHashRegionSize = FileFunctions.readIntFromFile(baseRom, ncchStartingOffset + 0x1B8) * media_unit_size;
+ byte[] romfsDataToHash = new byte[romfsHashRegionSize];
+ fNew.seek(newRomfsOffset);
+ fNew.readFully(romfsDataToHash);
+ byte[] romfsSuperblockHash = digest.digest(romfsDataToHash);
+ fNew.seek(0x1E0);
+ fNew.write(romfsSuperblockHash);
+
+ // While totally optional, let's zero out the NCCH signature so that
+ // it's clear this isn't a properly-signed ROM
+ byte[] zeroedSignature = new byte[0x100];
+ fNew.seek(0x0);
+ fNew.write(zeroedSignature);
+ fNew.close();
+ }
+
+ private long rebuildExefs(RandomAccessFile fNew, long newExefsOffset) throws IOException, NoSuchAlgorithmException {
+ System.out.println("NCCH: Rebuilding exefs...");
+ byte[] code = getCode();
+ if (codeCompressed) {
+ code = new BLZCoder(null).BLZ_EncodePub(code, false, true, ".code");
+ }
+
+ // Create a new ExefsFileHeader for our updated .code
+ ExefsFileHeader newCodeHeader = new ExefsFileHeader();
+ newCodeHeader.filename = codeFileHeader.filename;
+ newCodeHeader.size = code.length;
+ newCodeHeader.offset = 0;
+
+ // For all the file headers, write them to the new ROM and store them in order for hashing later
+ ExefsFileHeader[] newHeaders = new ExefsFileHeader[10];
+ newHeaders[0] = newCodeHeader;
+ fNew.seek(newExefsOffset);
+ fNew.write(newCodeHeader.asBytes());
+ for (int i = 0; i < extraExefsFiles.size(); i++) {
+ ExefsFileHeader header = extraExefsFiles.get(i);
+ newHeaders[i + 1] = header;
+ fNew.write(header.asBytes());
+ }
+
+ // Write the file data, then hash the data and write the hashes in reverse order
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ long endingOffset = 0;
+ for (int i = 0; i < newHeaders.length; i++) {
+ ExefsFileHeader header = newHeaders[i];
+ if (header != null) {
+ byte[] data;
+ if (header.filename.equals(".code")) {
+ data = code;
+ } else if (header.filename.equals("icon")) {
+ data = smdh.getBytes();
+ } else {
+ long dataOffset = exefsOffset + 0x200 + header.offset;
+ data = new byte[header.size];
+ baseRom.seek(dataOffset);
+ baseRom.readFully(data);
+ }
+ fNew.seek(newExefsOffset + 0x200 + header.offset);
+ fNew.write(data);
+ byte[] hash = digest.digest(data);
+ fNew.seek(newExefsOffset + 0x200 - ((i + 1) * 0x20));
+ fNew.write(hash);
+ endingOffset = newExefsOffset + 0x200 + header.offset + header.size;
+ }
+ }
+
+ // Pad to media unit size
+ fNew.seek(endingOffset);
+ long exefsLength = endingOffset - newExefsOffset;
+ while (exefsLength % media_unit_size != 0) {
+ fNew.writeByte(0);
+ exefsLength++;
+ }
+
+ System.out.println("NCCH: Done rebuilding exefs");
+ return exefsLength;
+ }
+
+ private long rebuildRomfs(RandomAccessFile fNew, long newRomfsOffset) throws IOException, NoSuchAlgorithmException {
+ System.out.println("NCCH: Rebuilding romfs...");
+
+ // Start by copying the romfs header straight from the original ROM. We'll update the
+ // header as we continue to build the romfs
+ byte[] romfsHeaderData = new byte[romfs_header_size];
+ baseRom.seek(romfsOffset);
+ baseRom.readFully(romfsHeaderData);
+ fNew.seek(newRomfsOffset);
+ fNew.write(romfsHeaderData);
+
+ // Now find the level 3 (file data) offset, since the first thing we need to do is write the
+ // updated file data. We're assuming here that the master hash size is smaller than the level 3
+ // hash block size, which it almost certainly will because we're not adding large amounts of data
+ // to the romfs
+ int masterHashSize = FileFunctions.readFullInt(romfsHeaderData, 0x08);
+ int level3HashBlockSize = 1 << FileFunctions.readFullInt(romfsHeaderData, 0x4C);
+ long level3Offset = romfsOffset + alignLong(0x60 + masterHashSize, level3HashBlockSize);
+ long newLevel3Offset = newRomfsOffset + alignLong(0x60 + masterHashSize, level3HashBlockSize);
+
+ // Copy the level 3 header straight from the original ROM. Since we're not adding or
+ // removing any files, the File/Directory tables should have the same offsets and lengths
+ byte[] level3HeaderData = new byte[level3_header_size];
+ baseRom.seek(level3Offset);
+ baseRom.readFully(level3HeaderData);
+ fNew.seek(newLevel3Offset);
+ fNew.write(level3HeaderData);
+
+ // Write out both hash tables and the directory metadata table. Since we're not adding or removing
+ // any files/directories, we can just use what's in the base ROM for this.
+ int directoryHashTableOffset = FileFunctions.readFullInt(level3HeaderData, 0x04);
+ int directoryHashTableLength = FileFunctions.readFullInt(level3HeaderData, 0x08);
+ int directoryMetadataTableOffset = FileFunctions.readFullInt(level3HeaderData, 0x0C);
+ int directoryMetadataTableLength = FileFunctions.readFullInt(level3HeaderData, 0x10);
+ int fileHashTableOffset = FileFunctions.readFullInt(level3HeaderData, 0x14);
+ int fileHashTableLength = FileFunctions.readFullInt(level3HeaderData, 0x18);
+ byte[] directoryHashTable = new byte[directoryHashTableLength];
+ baseRom.seek(level3Offset + directoryHashTableOffset);
+ baseRom.readFully(directoryHashTable);
+ fNew.seek(newLevel3Offset + directoryHashTableOffset);
+ fNew.write(directoryHashTable);
+ byte[] directoryMetadataTable = new byte[directoryMetadataTableLength];
+ baseRom.seek(level3Offset + directoryMetadataTableOffset);
+ baseRom.readFully(directoryMetadataTable);
+ fNew.seek(newLevel3Offset + directoryMetadataTableOffset);
+ fNew.write(directoryMetadataTable);
+ byte[] fileHashTable = new byte[fileHashTableLength];
+ baseRom.seek(level3Offset + fileHashTableOffset);
+ baseRom.readFully(fileHashTable);
+ fNew.seek(newLevel3Offset + fileHashTableOffset);
+ fNew.write(fileHashTable);
+
+ // Now reconstruct the file metadata table. It may need to be changed if any file grew or shrunk
+ int fileMetadataTableOffset = FileFunctions.readFullInt(level3HeaderData, 0x1C);
+ int fileMetadataTableLength = FileFunctions.readFullInt(level3HeaderData, 0x20);
+ byte[] newFileMetadataTable = updateFileMetadataTable(fileMetadataTableLength);
+ fNew.seek(newLevel3Offset + fileMetadataTableOffset);
+ fNew.write(newFileMetadataTable);
+
+ // Using the new file metadata table, output the file data
+ int fileDataOffset = FileFunctions.readFullInt(level3HeaderData, 0x24);
+ long endOfFileDataOffset = 0;
+ for (FileMetadata metadata : fileMetadataList) {
+ System.out.println("NCCH: Writing file " + metadata.file.fullPath + " to romfs");
+ // Users have sent us bug reports with really bizarre errors here that seem to indicate
+ // broken metadata; do this in a try-catch solely so we can log the metadata if we fail
+ try {
+ byte[] fileData;
+ if (metadata.file.fileChanged) {
+ fileData = metadata.file.getOverrideContents();
+ } else {
+ fileData = new byte[metadata.file.size];
+ baseRom.seek(metadata.file.offset);
+ baseRom.readFully(fileData);
+ }
+ long currentDataOffset = newLevel3Offset + fileDataOffset + metadata.fileDataOffset;
+ fNew.seek(currentDataOffset);
+ fNew.write(fileData);
+ endOfFileDataOffset = currentDataOffset + fileData.length;
+ } catch (Exception e) {
+ String message = String.format("Error when building romfs: File: %s, offset: %s, size: %s",
+ metadata.file.fullPath, metadata.offset, metadata.file.size);
+ throw new RandomizerIOException(message, e);
+ }
+ }
+
+ // Now that level 3 (file data) is done, construct level 2 (hashes of file data)
+ // Note that in the ROM, level 1 comes *before* level 2, so we need to calculate
+ // level 1 length and offset as well.
+ long newLevel3EndingOffset = endOfFileDataOffset;
+ long newLevel3HashdataSize = newLevel3EndingOffset - newLevel3Offset;
+ long numberOfLevel3HashBlocks = alignLong(newLevel3HashdataSize, level3HashBlockSize) / level3HashBlockSize;
+ int level2HashBlockSize = 1 << FileFunctions.readFullInt(romfsHeaderData, 0x34);
+ long newLevel2HashdataSize = numberOfLevel3HashBlocks * 0x20;
+ long numberOfLevel2HashBlocks = alignLong(newLevel2HashdataSize, level2HashBlockSize) / level2HashBlockSize;
+ int level1HashBlockSize = 1 << FileFunctions.readFullInt(romfsHeaderData, 0x1C);
+ long newLevel1HashdataSize = numberOfLevel2HashBlocks * 0x20;
+ long newLevel1Offset = newLevel3Offset + alignLong(newLevel3HashdataSize, level3HashBlockSize);
+ long newLevel2Offset = newLevel1Offset + alignLong(newLevel1HashdataSize, level1HashBlockSize);
+ long newFileEndingOffset = alignLong(newLevel2Offset + newLevel2HashdataSize, level2HashBlockSize);
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ byte[] dataToHash = new byte[level3HashBlockSize];
+ for (long i = 0; i < numberOfLevel3HashBlocks; i++) {
+ fNew.seek(newLevel3Offset + (i * level3HashBlockSize));
+ fNew.readFully(dataToHash);
+ byte[] hash = digest.digest(dataToHash);
+ fNew.seek(newLevel2Offset + (i * 0x20));
+ fNew.write(hash);
+ }
+ while (fNew.getFilePointer() != newFileEndingOffset) {
+ fNew.writeByte(0);
+ }
+
+ // Now that level 2 (hashes of file data) is done, construct level 1 (hashes of
+ // hashes of file data) and the master hash/level 0 (hashes of level 1)
+ dataToHash = new byte[level2HashBlockSize];
+ for (long i = 0; i < numberOfLevel2HashBlocks; i++) {
+ fNew.seek(newLevel2Offset + (i * level2HashBlockSize));
+ fNew.readFully(dataToHash);
+ byte[] hash = digest.digest(dataToHash);
+ fNew.seek(newLevel1Offset + (i * 0x20));
+ fNew.write(hash);
+ }
+ long numberOfLevel1HashBlocks = alignLong(newLevel1HashdataSize, level1HashBlockSize) / level1HashBlockSize;
+ dataToHash = new byte[level1HashBlockSize];
+ for (long i = 0; i < numberOfLevel1HashBlocks; i++) {
+ fNew.seek(newLevel1Offset + (i * level1HashBlockSize));
+ fNew.readFully(dataToHash);
+ byte[] hash = digest.digest(dataToHash);
+ fNew.seek(newRomfsOffset + 0x60 + (i * 0x20));
+ fNew.write(hash);
+ }
+
+ // Lastly, update the header and return the size of the new romfs
+ long level1LogicalOffset = 0;
+ long level2LogicalOffset = alignLong(newLevel1HashdataSize, level1HashBlockSize);
+ long level3LogicalOffset = alignLong(level2LogicalOffset + newLevel2HashdataSize, level2HashBlockSize);
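+ // Update the IVFC descriptors: master hash size at 0x08, then each level's logical offset and hash-data size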
+ FileFunctions.writeFullInt(romfsHeaderData, 0x08, (int) numberOfLevel1HashBlocks * 0x20);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x0C, level1LogicalOffset);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x14, newLevel1HashdataSize);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x24, level2LogicalOffset);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x2C, newLevel2HashdataSize);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x3C, level3LogicalOffset);
+ FileFunctions.writeFullLong(romfsHeaderData, 0x44, newLevel3HashdataSize);
+ fNew.seek(newRomfsOffset);
+ fNew.write(romfsHeaderData);
+ long currentLength = newFileEndingOffset - newRomfsOffset;
+ long newRomfsLength = alignLong(currentLength, media_unit_size);
+ fNew.seek(newFileEndingOffset);
+ while (fNew.getFilePointer() < newRomfsOffset + newRomfsLength) {
+ fNew.writeByte(0);
+ }
+
+ System.out.println("NCCH: Done rebuilding romfs");
+ return newRomfsLength;
+ }
+
+ private byte[] updateFileMetadataTable(int fileMetadataTableLength) {
+ fileMetadataList.sort((FileMetadata f1, FileMetadata f2) -> (int) (f1.fileDataOffset - f2.fileDataOffset));
+ byte[] fileMetadataTable = new byte[fileMetadataTableLength];
+ int currentTableOffset = 0;
+ long currentFileDataOffset = 0;
+ for (FileMetadata metadata : fileMetadataList) {
+ metadata.fileDataOffset = currentFileDataOffset;
+ if (metadata.file.fileChanged) {
+ metadata.fileDataLength = metadata.file.size;
+ }
+ byte[] metadataBytes = metadata.asBytes();
+ System.arraycopy(metadataBytes, 0, fileMetadataTable, currentTableOffset, metadataBytes.length);
+ currentTableOffset += metadataBytes.length;
+ currentFileDataOffset += metadata.fileDataLength;
+ }
+ return fileMetadataTable;
+ }
+
+ public void saveAsLayeredFS(String outputPath) throws IOException {
+ String layeredFSRootPath = outputPath + File.separator + titleId + File.separator;
+ File layeredFSRootDir = new File(layeredFSRootPath);
+ if (!layeredFSRootDir.exists()) {
+ layeredFSRootDir.mkdirs();
+ } else {
+ purgeDirectory(layeredFSRootDir);
+ }
+ String romfsRootPath = layeredFSRootPath + "romfs" + File.separator;
+ File romfsDir = new File(romfsRootPath);
+ if (!romfsDir.exists()) {
+ romfsDir.mkdirs();
+ }
+
+ if (codeChanged) {
+ byte[] code = getCode();
+ FileOutputStream fos = new FileOutputStream(new File(layeredFSRootPath + "code.bin"));
+ fos.write(code);
+ fos.close();
+ }
+
+ for (Map.Entry<String, RomfsFile> entry : romfsFiles.entrySet()) {
+ RomfsFile file = entry.getValue();
+ if (file.fileChanged) {
+ writeRomfsFileToLayeredFS(file, romfsRootPath);
+ }
+ }
+ }
+
+ private void purgeDirectory(File directory) {
+ for (File file : directory.listFiles()) {
+ if (file.isDirectory()) {
+ purgeDirectory(file);
+ }
+ file.delete();
+ }
+ }
+
+ private void writeRomfsFileToLayeredFS(RomfsFile file, String layeredFSRootPath) throws IOException {
+ String[] romfsPathComponents = file.fullPath.split("/");
+ StringBuffer buffer = new StringBuffer(layeredFSRootPath);
+ for (int i = 0; i < romfsPathComponents.length - 1; i++) {
+ buffer.append(romfsPathComponents[i]);
+ buffer.append(File.separator);
+ File currentDir = new File(buffer.toString());
+ if (!currentDir.exists()) {
+ currentDir.mkdirs();
+ }
+ }
+ buffer.append(romfsPathComponents[romfsPathComponents.length - 1]);
+ String romfsFilePath = buffer.toString();
+ FileOutputStream fos = new FileOutputStream(new File(romfsFilePath));
+ fos.write(file.getOverrideContents());
+ fos.close();
+ }
+
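+ // For illustration with a hypothetical entry: a RomfsFile whose fullPath is "a/0/1/2",
+ // written under a romfs root of <layeredFSRootPath>, ends up on disk at
+ // <layeredFSRootPath>/a/0/1/2 (joined with File.separator), with the intermediate
+ // directories created as needed.
+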
+ public boolean isDecrypted() throws IOException {
+ // This is the way you're *supposed* to tell if a ROM is decrypted. Specifically, this
+ // is checking the noCrypto flag on the NCCH bitflags.
+ long ncchFlagOffset = ncchStartingOffset + 0x188;
+ byte[] ncchFlags = new byte[8];
+ baseRom.seek(ncchFlagOffset);
+ baseRom.readFully(ncchFlags);
+ if ((ncchFlags[7] & 0x4) != 0) {
+ return true;
+ }
+
+ // However, some poorly-decrypted ROMs don't set this flag. So our heuristic for detecting
+ // if they're decrypted is to check whether the battle CRO exists, since all 3DS Pokemon
+ // games and updates have this file. If the game is *really* encrypted, then the odds of us
+ // successfully extracting this exact name from the metadata tables are vanishingly small.
+ return romfsFiles != null && (romfsFiles.containsKey("DllBattle.cro") || romfsFiles.containsKey("Battle.cro"));
+ }
+
+ // Retrieves a decompressed version of .code (the game's executable).
+ // The first time this is called, it will retrieve it straight from the
+ // exefs. Future calls will rely on a cached version to speed things up.
+ // If writing is enabled, it will cache the decompressed version to the
+ // tmpFolder; otherwise, it will store it in RAM.
+ public byte[] getCode() throws IOException {
+ if (!codeOpen) {
+ codeOpen = true;
+ byte[] code = new byte[codeFileHeader.size];
+
+ // File header offsets are from the start of the exefs but *exclude* the
+ // size of the exefs header, so we need to add it back ourselves.
+ baseRom.seek(exefsOffset + exefs_header_size + codeFileHeader.offset);
+ baseRom.readFully(code);
+ originalCodeCRC = FileFunctions.getCRC32(code);
+
+ if (codeCompressed) {
+ code = new BLZCoder(null).BLZ_DecodePub(code, ".code");
+ }
+
+ // Cache the decompressed code, either on disk (if writing is enabled) or in RAM
+ if (writingEnabled) {
+ File arm9file = new File(tmpFolder + ".code");
+ FileOutputStream fos = new FileOutputStream(arm9file);
+ fos.write(code);
+ fos.close();
+ arm9file.deleteOnExit();
+ this.codeRamstored = null;
+ return code;
+ } else {
+ this.codeRamstored = code;
+ byte[] newcopy = new byte[code.length];
+ System.arraycopy(code, 0, newcopy, 0, code.length);
+ return newcopy;
+ }
+ } else {
+ if (writingEnabled) {
+ return FileFunctions.readFileFullyIntoBuffer(tmpFolder + ".code");
+ } else {
+ byte[] newcopy = new byte[this.codeRamstored.length];
+ System.arraycopy(this.codeRamstored, 0, newcopy, 0, this.codeRamstored.length);
+ return newcopy;
+ }
+ }
+ }
+
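+ // A minimal sketch (not called from anywhere in this class) of the intended
+ // getCode()/writeCode() round trip; the patched offset and value are hypothetical.
+ private void examplePatchCode() throws IOException {
+ byte[] code = getCode(); // decompressed .code, cached after the first call
+ code[0x1000] = (byte) 0xE0; // hypothetical single-byte patch
+ writeCode(code); // held in tmpFolder or RAM until the ROM is rebuilt or saved as LayeredFS
+ }
+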
+ public void writeCode(byte[] code) throws IOException {
+ if (!codeOpen) {
+ getCode();
+ }
+ codeChanged = true;
+ if (writingEnabled) {
+ FileOutputStream fos = new FileOutputStream(new File(tmpFolder + ".code"));
+ fos.write(code);
+ fos.close();
+ } else {
+ if (this.codeRamstored.length == code.length) {
+ // copy new in
+ System.arraycopy(code, 0, this.codeRamstored, 0, code.length);
+ } else {
+ // make new array
+ this.codeRamstored = null;
+ this.codeRamstored = new byte[code.length];
+ System.arraycopy(code, 0, this.codeRamstored, 0, code.length);
+ }
+ }
+ }
+
+ public boolean hasFile(String filename) {
+ return romfsFiles.containsKey(filename);
+ }
+
+ // returns null if file doesn't exist
+ public byte[] getFile(String filename) throws IOException {
+ if (romfsFiles.containsKey(filename)) {
+ return romfsFiles.get(filename).getContents();
+ } else {
+ return null;
+ }
+ }
+
+ public void writeFile(String filename, byte[] data) throws IOException {
+ if (romfsFiles.containsKey(filename)) {
+ romfsFiles.get(filename).writeOverride(data);
+ }
+ }
+
+ public void printRomDiagnostics(PrintStream logStream, NCCH gameUpdate) {
+ Path p = Paths.get(this.romFilename);
+ logStream.println("File name: " + p.getFileName().toString());
+ if (gameUpdate == null) {
+ logStream.println(".code: " + String.format("%08X", this.originalCodeCRC));
+ } else {
+ logStream.println(".code: " + String.format("%08X", gameUpdate.originalCodeCRC));
+ }
+ logStream.println("romfs header: " + String.format("%08X", this.originalRomfsHeaderCRC));
+ if (gameUpdate != null) {
+ logStream.println("romfs header (game update): " + String.format("%08X", gameUpdate.originalRomfsHeaderCRC));
+ }
+ List<String> fileList = new ArrayList<>();
+ Map<String, String> baseRomfsFileDiagnostics = this.getRomfsFilesDiagnostics();
+ Map<String, String> updateRomfsFileDiagnostics = new HashMap<>();
+ if (gameUpdate != null) {
+ updateRomfsFileDiagnostics = gameUpdate.getRomfsFilesDiagnostics();
+ }
+ for (Map.Entry<String, String> entry : updateRomfsFileDiagnostics.entrySet()) {
+ baseRomfsFileDiagnostics.remove(entry.getKey());
+ fileList.add(entry.getValue());
+ }
+ for (Map.Entry<String, String> entry : baseRomfsFileDiagnostics.entrySet()) {
+ fileList.add(entry.getValue());
+ }
+ Collections.sort(fileList);
+ for (String fileLog : fileList) {
+ logStream.println(fileLog);
+ }
+ }
+
+ public Map<String, String> getRomfsFilesDiagnostics() {
+ Map<String, String> fileDiagnostics = new HashMap<>();
+ for (Map.Entry<String, RomfsFile> entry : romfsFiles.entrySet()) {
+ if (entry.getValue().originalCRC != 0) {
+ fileDiagnostics.put(entry.getKey(), entry.getKey() + ": " + String.format("%08X", entry.getValue().originalCRC));
+ }
+ }
+ return fileDiagnostics;
+ }
+
+ public String getTmpFolder() {
+ return tmpFolder;
+ }
+
+ public RandomAccessFile getBaseRom() {
+ return baseRom;
+ }
+
+ public boolean isWritingEnabled() {
+ return writingEnabled;
+ }
+
+ public String getProductCode() {
+ return productCode;
+ }
+
+ public String getTitleId() {
+ return titleId;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public static int alignInt(int num, int alignment) {
+ int mask = ~(alignment - 1);
+ return (num + (alignment - 1)) & mask;
+ }
+
+ public static long alignLong(long num, long alignment) {
+ long mask = ~(alignment - 1);
+ return (num + (alignment - 1)) & mask;
+ }
+
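+ // Worked example: alignInt(0x1234, 0x200) == 0x1400, while alignInt(0x1400, 0x200) == 0x1400
+ // (values already on a boundary are returned unchanged). Note that this mask trick only works
+ // for power-of-two alignments, which is what the callers here pass (4, 64, the media unit
+ // size, and the hash block sizes).
+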
+ private int readVersionFromFile() {
+ try {
+ // Only CIAs can define a version in their TMD. If this is a different ROM type,
+ // just exit out early.
+ int magic = FileFunctions.readBigEndianIntFromFile(this.baseRom, ncch_and_ncsd_magic_offset);
+ if (magic == ncch_magic || magic == ncsd_magic) {
+ return 0;
+ }
+
+ // For CIAs, we need to read the title metadata (TMD) in order to retrieve the version.
+ // The TMD is after the certificate chain and ticket.
+ int certChainSize = FileFunctions.readIntFromFile(this.baseRom, 0x08);
+ int ticketSize = FileFunctions.readIntFromFile(this.baseRom, 0x0C);
+ long certChainOffset = NCCH.alignLong(cia_header_size, 64);
+ long ticketOffset = NCCH.alignLong(certChainOffset + certChainSize, 64);
+ long tmdOffset = NCCH.alignLong(ticketOffset + ticketSize, 64);
+
+ // At the start of the TMD is a signature whose length varies based on what type of signature it is.
+ int signatureType = FileFunctions.readBigEndianIntFromFile(this.baseRom, tmdOffset);
+ int signatureSize, paddingSize;
+ switch (signatureType) {
+ case 0x010003:
+ signatureSize = 0x200;
+ paddingSize = 0x3C;
+ break;
+ case 0x010004:
+ signatureSize = 0x100;
+ paddingSize = 0x3C;
+ break;
+ case 0x010005:
+ signatureSize = 0x3C;
+ paddingSize = 0x40;
+ break;
+ default:
+ signatureSize = -1;
+ paddingSize = -1;
+ break;
+ }
+ if (signatureSize == -1) {
+ // This shouldn't happen in practice, since all used and valid signature types are represented
+ // in the above switch. However, if we can't find the right signature type, then it's probably
+ // an invalid CIA anyway, so we're unlikely to get good version information out of it.
+ return 0;
+ }
+
+ // After the signature is the TMD header, which actually contains the version information.
+ long tmdHeaderOffset = tmdOffset + 4 + signatureSize + paddingSize;
+ return FileFunctions.read2ByteBigEndianIntFromFile(this.baseRom, tmdHeaderOffset + 0x9C);
+ } catch (IOException e) {
+ throw new RandomizerIOException(e);
+ }
+ }
+
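+ // Worked example with typical (but not guaranteed) CIA layout values: assuming a 0x2020-byte
+ // header, a 0xA00-byte certificate chain, and a 0x350-byte ticket,
+ //   certChainOffset = alignLong(0x2020, 64) = 0x2040
+ //   ticketOffset    = alignLong(0x2040 + 0xA00, 64) = 0x2A40
+ //   tmdOffset       = alignLong(0x2A40 + 0x350, 64) = 0x2DC0
+ // and with an RSA-2048/SHA-256 signature (type 0x010004: 0x100 bytes plus 0x3C padding),
+ // the title version is the big-endian short at 0x2DC0 + 4 + 0x100 + 0x3C + 0x9C.
+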
+ // At the bare minimum, a 3DS game consists of what's known as a CXI file, which
+ // is just an NCCH that contains executable code. However, 3DS games are packaged
+ // in various containers that can hold other NCCH files like the game manual and
+ // firmware updates, among other things. This function determines the location
+ // of the CXI regardless of the container.
+ public static long getCXIOffsetInFile(String filename) {
+ try {
+ RandomAccessFile rom = new RandomAccessFile(filename, "r");
+ int ciaHeaderSize = FileFunctions.readIntFromFile(rom, 0x00);
+ if (ciaHeaderSize == cia_header_size) {
+ // This *might* be a CIA; make a best effort to get a CXI out of it.
+ int certChainSize = FileFunctions.readIntFromFile(rom, 0x08);
+ int ticketSize = FileFunctions.readIntFromFile(rom, 0x0C);
+ int tmdFileSize = FileFunctions.readIntFromFile(rom, 0x10);
+
+ // If this is *really* a CIA, we'll find our CXI at the beginning of the
+ // content section, which is after the certificate chain, ticket, and TMD
+ long certChainOffset = NCCH.alignLong(ciaHeaderSize, 64);
+ long ticketOffset = NCCH.alignLong(certChainOffset + certChainSize, 64);
+ long tmdOffset = NCCH.alignLong(ticketOffset + ticketSize, 64);
+ long contentOffset = NCCH.alignLong(tmdOffset + tmdFileSize, 64);
+ int magic = FileFunctions.readBigEndianIntFromFile(rom, contentOffset + ncch_and_ncsd_magic_offset);
+ if (magic == ncch_magic) {
+ // This CIA's content contains a valid CXI!
+ rom.close();
+ return contentOffset;
+ }
+ }
+
+ // We don't put the following code in an else-block because there *might*
+ // exist a totally-valid CXI or CCI whose first four bytes just so
+ // *happen* to be the same as the first four bytes of a CIA file.
+ int magic = FileFunctions.readBigEndianIntFromFile(rom, ncch_and_ncsd_magic_offset);
+ rom.close();
+ if (magic == ncch_magic) {
+ // Magic is NCCH, so this is just a straight-up NCCH/CXI; there is no container
+ // around the game data. Thus, the CXI offset is the beginning of the file.
+ return 0;
+ } else if (magic == ncsd_magic) {
+ // Magic is NCSD, so this is almost certainly a CCI. The CXI is always
+ // a fixed distance away from the start.
+ return 0x4000;
+ } else {
+ // This doesn't seem to be a valid 3DS file.
+ return -1;
+ }
+ } catch (IOException e) {
+ throw new RandomizerIOException(e);
+ }
+ }
+
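+ // A minimal usage sketch (the path is hypothetical); a return value of -1 means the file
+ // did not look like a CIA, CCI, or bare CXI:
+ //
+ //     long cxiOffset = NCCH.getCXIOffsetInFile("roms/game.3ds");
+ //     if (cxiOffset >= 0) {
+ //         // pass the offset along when constructing the NCCH for this ROM
+ //     }
+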
+ private class ExefsFileHeader {
+ public String filename;
+ public int offset;
+ public int size;
+
+ public ExefsFileHeader() { }
+
+ public ExefsFileHeader(byte[] exefsHeaderData, int fileHeaderOffset) {
+ byte[] filenameBytes = new byte[0x8];
+ System.arraycopy(exefsHeaderData, fileHeaderOffset, filenameBytes, 0, 0x8);
+ this.filename = new String(filenameBytes, StandardCharsets.UTF_8).trim();
+ this.offset = FileFunctions.readFullInt(exefsHeaderData, fileHeaderOffset + 0x08);
+ this.size = FileFunctions.readFullInt(exefsHeaderData, fileHeaderOffset + 0x0C);
+ }
+
+ public boolean isValid() {
+ return this.filename != "" && this.size != 0;
+ }
+
+ public byte[] asBytes() {
+ byte[] output = new byte[0x10];
+ byte[] filenameBytes = this.filename.getBytes(StandardCharsets.UTF_8);
+ System.arraycopy(filenameBytes, 0, output, 0, filenameBytes.length);
+ FileFunctions.writeFullInt(output, 0x08, this.offset);
+ FileFunctions.writeFullInt(output, 0x0C, this.size);
+ return output;
+ }
+ }
+
+ private class DirectoryMetadata {
+ public int parentDirectoryOffset;
+ public int siblingDirectoryOffset;
+ public int firstChildDirectoryOffset;
+ public int firstFileOffset;
+ public int nextDirectoryInHashBucketOffset;
+ public int nameLength;
+ public String name;
+
+ public DirectoryMetadata(byte[] directoryMetadataBlock, int offset) {
+ parentDirectoryOffset = FileFunctions.readFullInt(directoryMetadataBlock, offset);
+ siblingDirectoryOffset = FileFunctions.readFullInt(directoryMetadataBlock, offset + 0x04);
+ firstChildDirectoryOffset = FileFunctions.readFullInt(directoryMetadataBlock, offset + 0x08);
+ firstFileOffset = FileFunctions.readFullInt(directoryMetadataBlock, offset + 0x0C);
+ nextDirectoryInHashBucketOffset = FileFunctions.readFullInt(directoryMetadataBlock, offset + 0x10);
+ nameLength = FileFunctions.readFullInt(directoryMetadataBlock, offset + 0x14);
+ name = "";
+ if (nameLength != metadata_unused) {
+ byte[] nameBytes = new byte[nameLength];
+ System.arraycopy(directoryMetadataBlock, offset + 0x18, nameBytes, 0, nameLength);
+ name = new String(nameBytes, StandardCharsets.UTF_16LE).trim();
+ }
+ }
+ }
+
+ private class FileMetadata {
+ public int offset;
+ public int parentDirectoryOffset;
+ public int siblingFileOffset;
+ public long fileDataOffset;
+ public long fileDataLength;
+ public int nextFileInHashBucketOffset;
+ public int nameLength;
+ public String name;
+ public RomfsFile file; // used only for rebuilding CXI
+
+ public FileMetadata(byte[] fileMetadataBlock, int offset) {
+ this.offset = offset;
+ parentDirectoryOffset = FileFunctions.readFullInt(fileMetadataBlock, offset);
+ siblingFileOffset = FileFunctions.readFullInt(fileMetadataBlock, offset + 0x04);
+ fileDataOffset = FileFunctions.readFullLong(fileMetadataBlock, offset + 0x08);
+ fileDataLength = FileFunctions.readFullLong(fileMetadataBlock, offset + 0x10);
+ nextFileInHashBucketOffset = FileFunctions.readFullInt(fileMetadataBlock, offset + 0x18);
+ nameLength = FileFunctions.readFullInt(fileMetadataBlock, offset + 0x1C);
+ name = "";
+ if (nameLength != metadata_unused) {
+ byte[] nameBytes = new byte[nameLength];
+ System.arraycopy(fileMetadataBlock, offset + 0x20, nameBytes, 0, nameLength);
+ name = new String(nameBytes, StandardCharsets.UTF_16LE).trim();
+ }
+ }
+
+ public byte[] asBytes() {
+ int metadataLength = 0x20;
+ if (nameLength != metadata_unused) {
+ metadataLength += alignInt(nameLength, 4);
+ }
+ byte[] output = new byte[metadataLength];
+ FileFunctions.writeFullInt(output, 0x00, this.parentDirectoryOffset);
+ FileFunctions.writeFullInt(output, 0x04, this.siblingFileOffset);
+ FileFunctions.writeFullLong(output, 0x08, this.fileDataOffset);
+ FileFunctions.writeFullLong(output, 0x10, this.fileDataLength);
+ FileFunctions.writeFullInt(output, 0x18, this.nextFileInHashBucketOffset);
+ FileFunctions.writeFullInt(output, 0x1C, this.nameLength);
+ if (!name.equals("")) {
+ byte[] nameBytes = name.getBytes(StandardCharsets.UTF_16LE);
+ System.arraycopy(nameBytes, 0, output, 0x20, nameBytes.length);
+ }
+ return output;
+ }
+ }
+}
diff --git a/src/com/pkrandom/ctr/RomfsFile.java b/src/com/pkrandom/ctr/RomfsFile.java
new file mode 100644
index 0000000..30e9f7c
--- /dev/null
+++ b/src/com/pkrandom/ctr/RomfsFile.java
@@ -0,0 +1,121 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- RomfsFile.java - an entry in the romfs filesystem --*/
+/*-- --*/
+/*-- Part of "Universal Pokemon Randomizer ZX" by the UPR-ZX team --*/
+/*-- Pokemon and any associated names and the like are --*/
+/*-- trademark and (C) Nintendo 1996-2020. --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+public class RomfsFile {
+
+ private NCCH parent;
+ public long offset;
+ public int size;
+ public String fullPath;
+ private Extracted status = Extracted.NOT;
+ private String extFilename;
+ public byte[] data;
+ public boolean fileChanged = false;
+ public long originalCRC;
+
+ public RomfsFile(NCCH parent) {
+ this.parent = parent;
+ }
+
+ public byte[] getContents() throws IOException {
+ if (this.status == Extracted.NOT) {
+ // extract file
+ parent.reopenROM();
+ RandomAccessFile rom = parent.getBaseRom();
+ byte[] buf = new byte[this.size];
+ rom.seek(this.offset);
+ rom.readFully(buf);
+ originalCRC = FileFunctions.getCRC32(buf);
+ if (parent.isWritingEnabled()) {
+ // make a file
+ String tmpDir = parent.getTmpFolder();
+ this.extFilename = fullPath.replaceAll("[^A-Za-z0-9_\\.]+", "");
+ File tmpFile = new File(tmpDir + extFilename);
+ FileOutputStream fos = new FileOutputStream(tmpFile);
+ fos.write(buf);
+ fos.close();
+ tmpFile.deleteOnExit();
+ this.status = Extracted.TO_FILE;
+ this.data = null;
+ return buf;
+ } else {
+ this.status = Extracted.TO_RAM;
+ this.data = buf;
+ byte[] newcopy = new byte[buf.length];
+ System.arraycopy(buf, 0, newcopy, 0, buf.length);
+ return newcopy;
+ }
+ } else if (this.status == Extracted.TO_RAM) {
+ byte[] newcopy = new byte[this.data.length];
+ System.arraycopy(this.data, 0, newcopy, 0, this.data.length);
+ return newcopy;
+ } else {
+ String tmpDir = parent.getTmpFolder();
+ return FileFunctions.readFileFullyIntoBuffer(tmpDir + this.extFilename);
+ }
+ }
+
+ public void writeOverride(byte[] data) throws IOException {
+ if (status == Extracted.NOT) {
+ // temp extract
+ getContents();
+ }
+ fileChanged = true;
+ size = data.length;
+ if (status == Extracted.TO_FILE) {
+ String tmpDir = parent.getTmpFolder();
+ FileOutputStream fos = new FileOutputStream(new File(tmpDir + this.extFilename));
+ fos.write(data);
+ fos.close();
+ } else {
+ if (this.data.length == data.length) {
+ // copy new in
+ System.arraycopy(data, 0, this.data, 0, data.length);
+ } else {
+ // make new array
+ this.data = null;
+ this.data = new byte[data.length];
+ System.arraycopy(data, 0, this.data, 0, data.length);
+ }
+ }
+ }
+
+ // returns null if no override
+ public byte[] getOverrideContents() throws IOException {
+ if (status == Extracted.NOT) {
+ return null;
+ }
+ return getContents();
+ }
+
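+ // A minimal sketch (not called from anywhere) of the intended read-modify-write flow;
+ // the byte being flipped is hypothetical.
+ private void exampleEdit() throws IOException {
+ byte[] contents = getContents(); // lazily extracted from the base ROM on first use
+ contents[0] ^= 0x01; // hypothetical tweak
+ writeOverride(contents); // sets fileChanged and stores the new contents
+ }
+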
+ private enum Extracted {
+ NOT, TO_FILE, TO_RAM
+ }
+}
diff --git a/src/com/pkrandom/ctr/SMDH.java b/src/com/pkrandom/ctr/SMDH.java
new file mode 100644
index 0000000..070809b
--- /dev/null
+++ b/src/com/pkrandom/ctr/SMDH.java
@@ -0,0 +1,118 @@
+package com.pkrandom.ctr;
+
+/*----------------------------------------------------------------------------*/
+/*-- SMDH.java - class for handling 3DS SMDH (icon.bin) files. --*/
+/*-- --*/
+/*-- Part of "Universal Pokemon Randomizer ZX" by the UPR-ZX team --*/
+/*-- Pokemon and any associated names and the like are --*/
+/*-- trademark and (C) Nintendo 1996-2020. --*/
+/*-- --*/
+/*-- This program is free software: you can redistribute it and/or modify --*/
+/*-- it under the terms of the GNU General Public License as published by --*/
+/*-- the Free Software Foundation, either version 3 of the License, or --*/
+/*-- (at your option) any later version. --*/
+/*-- --*/
+/*-- This program is distributed in the hope that it will be useful, --*/
+/*-- but WITHOUT ANY WARRANTY; without even the implied warranty of --*/
+/*-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --*/
+/*-- GNU General Public License for more details. --*/
+/*-- --*/
+/*-- You should have received a copy of the GNU General Public License --*/
+/*-- along with this program. If not, see <http://www.gnu.org/licenses/>. --*/
+/*----------------------------------------------------------------------------*/
+
+import com.pkrandom.FileFunctions;
+
+import java.nio.charset.StandardCharsets;
+
+public class SMDH {
+
+ private byte[] data;
+ private String[] shortDescriptions = new String[12];
+ private String[] longDescriptions = new String[12];
+ private String[] publishers = new String[12];
+
+ private static final int smdh_magic = 0x48444D53;
+ private static final int length_of_title = 0x200;
+ private static final int short_description_length = 0x80;
+ private static final int long_description_length = 0x100;
+ private static final int publisher_length = 0x80;
+
+ public SMDH(byte[] smdhData) {
+ data = smdhData;
+ if (this.isValid()) {
+ readDescriptionsAndPublishers();
+ }
+ }
+
+ public byte[] getBytes() {
+ return data;
+ }
+
+ public void setAllDescriptions(String newDescription) {
+ byte[] newDescriptionBytes = newDescription.getBytes(StandardCharsets.UTF_16LE);
+ if (newDescriptionBytes.length <= short_description_length) {
+ for (int i = 0; i < 12; i++) {
+ shortDescriptions[i] = newDescription;
+ longDescriptions[i] = newDescription;
+ }
+ writeDescriptionsAndPublishers();
+ }
+ }
+
+ public void setAllPublishers(String newPublisher) {
+ byte[] newPublisherBytes = newPublisher.getBytes(StandardCharsets.UTF_16LE);
+ if (newPublisherBytes.length <= publisher_length) {
+ for (int i = 0; i < 12; i++) {
+ publishers[i] = newPublisher;
+ }
+ writeDescriptionsAndPublishers();
+ }
+ }
+
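+ // A minimal usage sketch (the strings are made up). Both setters silently skip the write
+ // if the UTF-16LE encoding would not fit the fixed-size field:
+ //
+ //     SMDH smdh = new SMDH(iconBinBytes);
+ //     smdh.setAllDescriptions("My Randomized Game");
+ //     smdh.setAllPublishers("UPR-ZX");
+ //     byte[] newIconBin = smdh.getBytes();
+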
+ private boolean isValid() {
+ int magic = FileFunctions.readFullInt(data, 0x0);
+ return magic == smdh_magic;
+ }
+
+ private void readDescriptionsAndPublishers() {
+ for (int i = 0; i < 12; i++) {
+ int shortDescriptionOffset = 0x08 + (length_of_title * i);
+ byte[] shortDescriptionBytes = new byte[short_description_length];
+ System.arraycopy(data, shortDescriptionOffset, shortDescriptionBytes, 0, short_description_length);
+ shortDescriptions[i] = new String(shortDescriptionBytes, StandardCharsets.UTF_16LE).trim();
+
+ int longDescriptionOffset = 0x88 + (length_of_title * i);
+ byte[] longDescriptionBytes = new byte[long_description_length];
+ System.arraycopy(data, longDescriptionOffset, longDescriptionBytes, 0, long_description_length);
+ longDescriptions[i] = new String(longDescriptionBytes, StandardCharsets.UTF_16LE).trim();
+
+ int publisherOffset = 0x188 + (length_of_title * i);
+ byte[] publisherBytes = new byte[publisher_length];
+ System.arraycopy(data, publisherOffset, publisherBytes, 0, publisher_length);
+ publishers[i] = new String(publisherBytes, StandardCharsets.UTF_16LE).trim();
+ }
+ }
+
+ private void writeDescriptionsAndPublishers() {
+ for (int i = 0; i < 12; i++) {
+ byte[] emptyShortDescription = new byte[short_description_length];
+ int shortDescriptionOffset = 0x08 + (length_of_title * i);
+ byte[] shortDescriptionBytes = shortDescriptions[i].getBytes(StandardCharsets.UTF_16LE);
+ System.arraycopy(emptyShortDescription, 0, data, shortDescriptionOffset, short_description_length);
+ System.arraycopy(shortDescriptionBytes, 0, data, shortDescriptionOffset, shortDescriptionBytes.length);
+
+ byte[] emptyLongDescription = new byte[long_description_length];
+ int longDescriptionOffset = 0x88 + (length_of_title * i);
+ byte[] longDescriptionBytes = longDescriptions[i].getBytes(StandardCharsets.UTF_16LE);
+ System.arraycopy(emptyLongDescription, 0, data, longDescriptionOffset, long_description_length);
+ System.arraycopy(longDescriptionBytes, 0, data, longDescriptionOffset, longDescriptionBytes.length);
+
+ byte[] emptyPublisher = new byte[publisher_length];
+ int publisherOffset = 0x188 + (length_of_title * i);
+ byte[] publisherBytes = publishers[i].getBytes(StandardCharsets.UTF_16LE);
+ System.arraycopy(emptyPublisher, 0, data, publisherOffset, publisher_length);
+ System.arraycopy(publisherBytes, 0, data, publisherOffset, publisherBytes.length);
+ }
+ }
+}