From 304f23a36c5a66868ff269bd051d8686185cdf5a Mon Sep 17 00:00:00 2001
From: Amrsatrio
Date: Fri, 19 Jun 2020 13:05:31 +0700
Subject: [PATCH] More material support

No more guessing algorithms
---
 README.md                                    |  27 +
 build.gradle                                 |   4 +-
 .../com/tb24/blenderumap/ByteArrayUtils.java |  36 +
 .../com/tb24/blenderumap/JWPSerializer.kt    |  85 ++-
 src/main/java/com/tb24/blenderumap/Main.java | 673 +++++++++---------
 src/main/resources/config.json               |  20 +
 src/main/resources/config.properties         |  21 -
 src/main/resources/log4j.properties          |  18 -
 src/main/resources/log4j2.xml                |  17 +
 umap.py                                      | 634 ++++++++++-------
 10 files changed, 917 insertions(+), 618 deletions(-)
 create mode 100644 README.md
 create mode 100644 src/main/java/com/tb24/blenderumap/ByteArrayUtils.java
 create mode 100644 src/main/resources/config.json
 delete mode 100644 src/main/resources/config.properties
 delete mode 100644 src/main/resources/log4j.properties
 create mode 100644 src/main/resources/log4j2.xml

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e1a2301
--- /dev/null
+++ b/README.md
@@ -0,0 +1,27 @@
+# BlenderUmap
+A Java tool to export Fortnite .umaps and a Python script to import them. More games will be supported as time goes on.
+
+## Usage
+* Before running the tool, you need to have 64-bit Java installed. [Get it here. (choose 64-bit Offline)](https://www.java.com/en/download/manual.jsp)\
+  **32-bit Java won't work with this tool!**
+* Extract the zip file
+* Edit config.json to suit your needs
+* Run the .bat file
+* Make sure you have the [Blender PSK import/export plugin](https://github.com/Befzz/blender3d_import_psk_psa), **at least version 2.7.13**, installed. With older versions of the plugin, the scaling of the props will appear broken.
+* In Blender, load the Python file as a script (via the Scripting tab)
+* (Optional) If you want to see the output of the script, show the system console (Window > Toggle System Console)
+* Click Run Script (Alt+P). Blender will freeze while it imports; that's normal.
+* Profit!
+
+## config.json
+* **`PaksDirectory`: Path to the Paks folder.**
+* `UEVersion`: Unreal Engine version. Supports up to UE4.25.
+* **`EncryptionKeys`: List of AES keys to use for loading the paks**
+  * `Guid`: Identify a pak by its encryption key GUID. Use `00000000000000000000000000000000` (32 zeros) to refer to the main paks.
+  * `FileName`: Alternatively, you can use this to identify a pak by its file name.
+  * `Key`: The pak's encryption key, in either hex (starting with "0x") or base64.
+* `bReadMaterials`: Export materials. Materials are experimental! Not all imported materials will be perfect. **Min. 24 GB of RAM recommended!**
+* `bRunUModel`: Run UModel as part of the export process to export meshes, materials, and textures.
+* `UModelAdditionalArgs`: Additional command-line arguments when starting UModel.
+* `bDumpAssets`: Save assets in JSON format, useful for debugging.
+* **`ExportPackage`: The .umap you want to export.** \ No newline at end of file diff --git a/build.gradle b/build.gradle index 8ddaaf4..266dd5e 100644 --- a/build.gradle +++ b/build.gradle @@ -5,7 +5,7 @@ plugins { } group 'com.tb24' -version '0.1.1' +version '0.2.0' sourceCompatibility = 1.8 @@ -38,6 +38,7 @@ repositories { mavenCentral() maven { url 'https://dl.bintray.com/fungamesleaks/mavenRepo' } // maven { url 'https://libraries.minecraft.net' } + maven { url 'https://jitpack.io' } } dependencies { @@ -45,6 +46,7 @@ dependencies { implementation 'com.google.code.gson:gson:2.8.6' // implementation 'com.mojang:brigadier:1.0.17' implementation 'me.fungames:JFortniteParse:+' // :3.0.2' + implementation 'org.slf4j:slf4j-api:1.7.30' testImplementation 'junit:junit:4.12' } diff --git a/src/main/java/com/tb24/blenderumap/ByteArrayUtils.java b/src/main/java/com/tb24/blenderumap/ByteArrayUtils.java new file mode 100644 index 0000000..cdb37fd --- /dev/null +++ b/src/main/java/com/tb24/blenderumap/ByteArrayUtils.java @@ -0,0 +1,36 @@ +/* + * (C) 2020 amrsatrio. All rights reserved. + */ +package com.tb24.blenderumap; + +import java.nio.ByteOrder; + +public class ByteArrayUtils { + private static final char[] LOOKUP_TABLE_LOWER = new char[]{0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66}; + private static final char[] LOOKUP_TABLE_UPPER = new char[]{0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46}; + + // https://stackoverflow.com/a/58118078 + public static String encode(byte[] byteArray, boolean upperCase, ByteOrder byteOrder) { + // our output size will be exactly 2x byte-array length + final char[] buffer = new char[byteArray.length * 2]; + + // choose lower or uppercase lookup table + final char[] lookup = upperCase ? LOOKUP_TABLE_UPPER : LOOKUP_TABLE_LOWER; + + int index; + for (int i = 0; i < byteArray.length; i++) { + // for little endian we count from last to first + index = (byteOrder == ByteOrder.BIG_ENDIAN) ? i : byteArray.length - i - 1; + + // extract the upper 4 bit and look up char (0-A) + buffer[i << 1] = lookup[(byteArray[index] >> 4) & 0xF]; + // extract the lower 4 bit and look up char (0-A) + buffer[(i << 1) + 1] = lookup[(byteArray[index] & 0xF)]; + } + return new String(buffer); + } + + public static String encode(byte[] byteArray) { + return encode(byteArray, false, ByteOrder.BIG_ENDIAN); + } +} diff --git a/src/main/java/com/tb24/blenderumap/JWPSerializer.kt b/src/main/java/com/tb24/blenderumap/JWPSerializer.kt index 7ebb860..ad05cca 100644 --- a/src/main/java/com/tb24/blenderumap/JWPSerializer.kt +++ b/src/main/java/com/tb24/blenderumap/JWPSerializer.kt @@ -1,3 +1,6 @@ +/* + * (C) 2020 amrsatrio. All rights reserved. + */ package com.tb24.blenderumap import com.google.gson.* @@ -6,9 +9,10 @@ import me.fungames.jfortniteparse.ue4.assets.exports.UDataTable import me.fungames.jfortniteparse.ue4.assets.exports.UExport import me.fungames.jfortniteparse.ue4.assets.exports.UObject import me.fungames.jfortniteparse.ue4.assets.objects.* -import me.fungames.jfortniteparse.ue4.assets.objects.FTextHistory.Base import me.fungames.jfortniteparse.ue4.assets.util.FName +import me.fungames.jfortniteparse.util.parseHexBinary import java.lang.reflect.Type +import java.util.* /** * Provides John Wick Parse JSON data structure for JFortniteParse objects. 
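For context on how this serializer is consumed: as seen later in this patch, Main.java writes both the per-asset JSON dumps (when `bDumpAssets` is enabled) and the final processed.json through `JWPSerializer.GSON`. Below is a minimal, hypothetical Java sketch of that usage; the class name, method name, and output path are placeholders, and it assumes a `Package` already loaded through JFortniteParse's `DefaultFileProvider`, as Main.java does.

import java.io.FileWriter;
import java.io.IOException;

import me.fungames.jfortniteparse.ue4.assets.Package;

public class DumpSketch {
    // Sketch only, not part of this patch: serialize a loaded package's exports
    // with the Gson instance configured in JWPSerializer, the same call
    // Main.java makes when bDumpAssets is true.
    static void dumpExports(Package pkg) throws IOException {
        try (FileWriter writer = new FileWriter("exports.json")) { // placeholder output path
            com.tb24.blenderumap.JWPSerializer.GSON.toJson(pkg.getExports(), writer);
        }
    }
}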
@@ -21,25 +25,38 @@ object JWPSerializer { .disableHtmlEscaping() .setPrettyPrinting() .serializeNulls() + .registerTypeAdapter(ByteArray::class.java, ByteArraySerializer()) .registerTypeAdapter(UByte::class.java, JsonSerializer { src, typeOfSrc, context -> JsonPrimitive(src.toByte()) }) .registerTypeHierarchyAdapter(UExport::class.java, ExportSerializer()) .registerTypeHierarchyAdapter(FPropertyTagType::class.java, JsonSerializer { src, typeOfSrc, context -> if (src is FPropertyTagType.DelegateProperty) { JsonObject().apply { addProperty("object", src.`object`) - addProperty("name", src.name.text) + add("name", context.serialize(src.name)) } } else { context.serialize(src.getTagTypeValue()) } }) + .registerTypeAdapter(FBox::class.java, JsonSerializer { src, typeOfSrc, context -> + JsonObject().apply { + add("min", context.serialize(src.min)) + add("max", context.serialize(src.max)) + } + }) .registerTypeAdapter(FGameplayTagContainer::class.java, JsonSerializer { src, typeOfSrc, context -> JsonObject().apply { - add("gameplay_tags", JsonArray().apply { src.gameplayTags.forEach { add(it.text) } }) + add("gameplay_tags", JsonArray().apply { src.gameplayTags.forEach { add(context.serialize(it)) } }) } }) - .registerTypeAdapter(FGuid::class.java, JsonSerializer { src, typeOfSrc, context -> - JsonPrimitive("%08x%08x%08x%08x".format(src.part1.toInt(), src.part2.toInt(), src.part3.toInt(), src.part4.toInt())) + .registerTypeAdapter(FGuid::class.java, GuidSerializer()) + .registerTypeAdapter(FLinearColor::class.java, JsonSerializer { src, typeOfSrc, context -> + JsonObject().apply { + addProperty("r", src.r) + addProperty("g", src.g) + addProperty("b", src.b) + addProperty("a", src.a) + } }) .registerTypeAdapter(FName::class.java, JsonSerializer { src, typeOfSrc, context -> JsonPrimitive(src.text) @@ -53,11 +70,11 @@ object JWPSerializer { out.addProperty("export", src.index) } else if (importObject != null) { out = JsonArray() - out.add(importObject.objectName.text) - out.add(src.outerImportObject!!.objectName.text) + out.add(context.serialize(importObject.objectName)) + out.add(context.serialize(src.outerImportObject!!.objectName)) if (src.outerImportObject!!.outerIndex.importObject != null) { - out.add(src.outerImportObject!!.outerIndex.importObject!!.objectName.text) + out.add(context.serialize(src.outerImportObject!!.outerIndex.importObject!!.objectName)) } } @@ -73,6 +90,16 @@ object JWPSerializer { .registerTypeAdapter(UScriptArray::class.java, JsonSerializer { src, typeOfSrc, context -> JsonArray().apply { src.contents.forEach { add(context.serialize(it)) } } }) + .registerTypeAdapter(UScriptMap::class.java, JsonSerializer { src, typeOfSrc, context -> + JsonArray().apply { + for ((k, v) in src.mapData) { + add(JsonObject().apply { + add("key", context.serialize(k)) + add("value", context.serialize(v)) + }) + } + } + }) .registerTypeAdapter(FSoftObjectPath::class.java, JsonSerializer { src, typeOfSrc, context -> JsonObject().apply { addProperty("asset_path_name", src.assetPathName.text) @@ -83,11 +110,19 @@ object JWPSerializer { JsonObject().apply { serializeProperties(this, src.properties, context) } }) .registerTypeAdapter(FText::class.java, JsonSerializer { src, typeOfSrc, context -> - val h = if (src.textHistory is FTextHistory.None) Base("", "", "") else src.textHistory + val h = if (src.textHistory is FTextHistory.None) FTextHistory.Base("", "", "") else src.textHistory JsonObject().apply { - addProperty("string", (h as Base).sourceString) - addProperty("namespace", h.nameSpace) - 
addProperty("key", h.key) + addProperty("string", h.text) + when (h) { + is FTextHistory.Base -> { + addProperty("namespace", h.nameSpace) + addProperty("key", h.key) + } + is FTextHistory.StringTableEntry -> { + add("table_id", context.serialize(h.tableId)) + addProperty("key", h.key) + } + } } }) .registerTypeAdapter(FVector::class.java, JsonSerializer { src, typeOfSrc, context -> @@ -127,4 +162,30 @@ object JWPSerializer { return obj } } + + private class GuidSerializer : JsonSerializer, JsonDeserializer { + override fun serialize(src: FGuid, typeOfSrc: Type, context: JsonSerializationContext): JsonElement? { + return JsonPrimitive("%08x%08x%08x%08x".format(src.part1.toInt(), src.part2.toInt(), src.part3.toInt(), src.part4.toInt())) + } + + override fun deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): FGuid? { + return FGuid(json.asString) + } + } + + private class ByteArraySerializer : JsonSerializer, JsonDeserializer { + override fun serialize(src: ByteArray, typeOfSrc: Type, context: JsonSerializationContext): JsonElement? { + return context.serialize(ByteArrayUtils.encode(src)) + } + + override fun deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): ByteArray? { + val s = json.asString + + return if (s.startsWith("0x")) { + s.substring(2).parseHexBinary() + } else { + Base64.getDecoder().decode(s); + } + } + } } diff --git a/src/main/java/com/tb24/blenderumap/Main.java b/src/main/java/com/tb24/blenderumap/Main.java index cbc20b7..e53bde9 100644 --- a/src/main/java/com/tb24/blenderumap/Main.java +++ b/src/main/java/com/tb24/blenderumap/Main.java @@ -1,159 +1,146 @@ /* * (C) 2020 amrsatrio. All rights reserved. */ - package com.tb24.blenderumap; import com.google.gson.JsonArray; import com.google.gson.JsonElement; -import com.google.gson.JsonNull; +import com.google.gson.JsonObject; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.awt.Desktop; import java.io.File; -import java.io.FileInputStream; +import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; -import java.net.URI; -import java.nio.file.Files; +import java.io.PrintWriter; +import java.lang.reflect.Array; import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Properties; +import java.util.Optional; import java.util.Set; import java.util.UUID; -import java.util.stream.Collectors; -import kotlin.collections.CollectionsKt; import kotlin.collections.MapsKt; import kotlin.text.StringsKt; import me.fungames.jfortniteparse.fileprovider.DefaultFileProvider; import me.fungames.jfortniteparse.ue4.FGuid; import me.fungames.jfortniteparse.ue4.assets.Package; import me.fungames.jfortniteparse.ue4.assets.exports.UExport; -import me.fungames.jfortniteparse.ue4.assets.exports.UObject; import me.fungames.jfortniteparse.ue4.assets.objects.FPackageIndex; import me.fungames.jfortniteparse.ue4.assets.objects.FPropertyTag; -import me.fungames.jfortniteparse.ue4.assets.objects.FPropertyTagType; import me.fungames.jfortniteparse.ue4.assets.objects.FRotator; import me.fungames.jfortniteparse.ue4.assets.objects.FSoftObjectPath; import me.fungames.jfortniteparse.ue4.assets.objects.FStructFallback; import me.fungames.jfortniteparse.ue4.assets.objects.FVector; -import 
me.fungames.jfortniteparse.ue4.assets.objects.UScriptArray; +import me.fungames.jfortniteparse.ue4.assets.util.FName; +import me.fungames.jfortniteparse.ue4.assets.util.StructFallbackReflectionUtilKt; import me.fungames.jfortniteparse.ue4.pak.GameFile; +import me.fungames.jfortniteparse.ue4.pak.PakFileReader; +import me.fungames.jfortniteparse.ue4.versions.GameKt; import me.fungames.jfortniteparse.ue4.versions.Ue4Version; +import static com.tb24.blenderumap.JWPSerializer.GSON; + public class Main { - private static File gamePath; - private static Ue4Version gameVersion; - private static String aes; - private static boolean readMaterials; - private static boolean runUmodel; - private static boolean useGltf; - private static Properties properties; + private static final Logger LOGGER = LoggerFactory.getLogger("BlenderUmap"); + private static Config config; private static File jsonsFolder = new File("jsons"); + private static DefaultFileProvider provider; private static Map loaded = new HashMap<>(); private static Set toExport = new HashSet<>(); - private static Map parsedMaterials = new HashMap<>(); - private static List warnings = new ArrayList<>(); private static long start = System.currentTimeMillis(); public static void main(String[] args) { try { - File configFile = new File("config.properties"); + File configFile = new File("config.json"); if (!configFile.exists()) { - System.err.println("config.properties not found"); + LOGGER.error("config.json not found"); return; } - properties = new Properties(); - properties.load(new FileInputStream(configFile)); - gamePath = new File(properties.getProperty("gamePath", "C:\\Program Files\\Epic Games\\Fortnite\\FortniteGame\\Content\\Paks")); - gameVersion = Ue4Version.values()[Integer.parseInt(properties.getProperty("gameVersion", "2"))]; - aes = properties.getProperty("aes"); - readMaterials = Boolean.parseBoolean(properties.getProperty("readMaterials", "false")); - runUmodel = Boolean.parseBoolean(properties.getProperty("runUmodel", "true")); - useGltf = Boolean.parseBoolean(properties.getProperty("useGltf", "false")); - String exportPackage = properties.getProperty("package"); - - if (exportPackage == null || exportPackage.isEmpty()) { - System.err.println("Please specify a package."); - return; + try (FileReader reader = new FileReader(configFile)) { + config = GSON.fromJson(reader, Config.class); } - if (aes == null || aes.isEmpty()) { - System.out.println("No AES key provided. Please modify config.properties to include the AES key using \"aes=\"."); - System.out.println("Opening https://fnbot.shop/api/aes"); - Desktop.getDesktop().browse(new URI("https://fnbot.shop/api/aes")); - return; - // the solution below returns 403 for some reason - /*System.out.println("AES is not defined, getting one from fnbot.shop..."); + File paksDir = new File(config.PaksDirectory); - try (Scanner scanner = new Scanner(new URL("https://fnbot.shop/api/aes").openStream(), "UTF-8").useDelimiter("\\A")) { - if (scanner.hasNext()) { - aes = scanner.next(); - System.out.println(aes); - } - }*/ + if (!paksDir.exists()) { + throw new MainException("Directory " + paksDir.getAbsolutePath() + " not found."); } - jsonsFolder.mkdir(); + if (config.UEVersion == null) { + throw new MainException("Please specify a valid UE version. 
Must be either of: " + Arrays.toString(Ue4Version.values())); + } - DefaultFileProvider provider = new DefaultFileProvider(gamePath, gameVersion); - provider.submitKey(FGuid.Companion.getMainGuid(), aes); - JsonArray bruh = exportAndProduceProcessed(provider, exportPackage); + if (config.ExportPackage == null || config.ExportPackage.isEmpty()) { + throw new MainException("Please specify ExportPackage."); + } - if (bruh == null) return; + provider = new DefaultFileProvider(paksDir, config.UEVersion); + Map keysToSubmit = new HashMap<>(); - if (runUmodel && !toExport.isEmpty()) { - exportUmodel(); + for (Config.EncryptionKey entry : config.EncryptionKeys) { + if (isEmpty(entry.FileName)) { + keysToSubmit.put(entry.Guid, entry.Key); + } else { + Optional foundGuid = provider.getUnloadedPaks().stream().filter(it -> it.getFileName().equals(entry.FileName)).findFirst(); + + if (foundGuid.isPresent()) { + keysToSubmit.put(foundGuid.get().getPakInfo().getEncryptionKeyGuid(), entry.Key); + } else { + LOGGER.warn("PAK file not found: " + entry.FileName); + } + } } - if (readMaterials) { - resolveMaterials(provider, bruh); + provider.submitKeys(keysToSubmit); + JsonArray components = exportAndProduceProcessed(config.ExportPackage); + + if (components == null) return; + + if (config.bRunUModel && !toExport.isEmpty()) { + exportUmodel(); } File file = new File("processed.json"); - System.out.println("Writing to " + file.getAbsolutePath()); + LOGGER.info("Writing to " + file.getAbsolutePath()); try (FileWriter writer = new FileWriter(file)) { - JWPSerializer.GSON.toJson(bruh, writer); + GSON.toJson(components, writer); } - try (FileWriter writer = new FileWriter(new File("summary.json"))) { - JWPSerializer.GSON.toJson(warnings, writer); + LOGGER.info(String.format("All done in %,.1f sec. In the Python script, replace the line with data_dir with this line below:\n\ndata_dir = r\"%s\"", (System.currentTimeMillis() - start) / 1000.0F, new File("").getAbsolutePath())); + } catch (Exception e) { + if (e instanceof MainException) { + LOGGER.info(e.getMessage()); + } else { + LOGGER.error("Uncaught exception", e); } - System.out.println(String.format("All done in %,.1fs. 
In the Python script, replace the line with data_dir with this line below:\n", (System.currentTimeMillis() - start) / 1000.0F)); - System.out.println("data_dir = \"" + (new File("").getAbsolutePath() + File.separatorChar).replace("\\", "\\\\") + "\""); - } catch (Exception e) { - e.printStackTrace(); + System.exit(1); } } - @Nullable - private static JsonArray exportAndProduceProcessed(DefaultFileProvider provider, String s) throws IOException { - System.out.println("\nExporting " + s); - Package pkg = loadIfNot(provider, s); - List exports = pkg.getExports(); - File file = new File(jsonsFolder, s.substring(s.lastIndexOf('/') + 1, s.lastIndexOf('.')) + ".json"); - System.out.println("Writing to " + file.getAbsolutePath()); + private static JsonArray exportAndProduceProcessed(String s) { + Package pkg = loadIfNot(s); - try (FileWriter writer = new FileWriter(file)) { - JWPSerializer.GSON.toJson(exports, writer); + if (pkg == null) { + return null; + } else if (!s.endsWith(".umap")) { + LOGGER.info(s + " is not an .umap, won't try to export"); + return null; } - if (!s.endsWith(".umap")) return null; - - JsonArray bruh = new JsonArray(); + JsonArray comps = new JsonArray(); - for (UExport export : exports) { + for (UExport export : pkg.getExports()) { String exportType = export.getExportType(); if (exportType.equals("LODActor")) { @@ -166,22 +153,23 @@ private static JsonArray exportAndProduceProcessed(DefaultFileProvider provider, continue; } - UExport refSMC = exports.get(smc.getIndex() - 1); - JsonArray obj = new JsonArray(); - bruh.add(obj); + UExport refSMC = pkg.getExports().get(smc.getIndex() - 1); + + // identifiers + JsonArray comp = new JsonArray(); + comps.add(comp); FGuid guid = getProp(export, "MyGuid", FGuid.class); - obj.add(guid != null ? guidAsString(guid) : UUID.randomUUID().toString().replace("-", "")); - obj.add(exportType); - FPackageIndex mesh = getProp(refSMC, "StaticMesh", FPackageIndex.class); + comp.add(guid != null ? 
guidAsString(guid) : UUID.randomUUID().toString().replace("-", "")); + comp.add(exportType); + + // region mesh String meshS = null; - FPackageIndex meshMat = null; + FPackageIndex mesh = getProp(refSMC, "StaticMesh", FPackageIndex.class); if (mesh == null || mesh.getIndex() == 0) { // read the actor class to find the mesh - GameFile actorPath = findBuildingActor(provider, exportType); - - if (actorPath != null) { - Package actorPkg = loadIfNot(provider, actorPath); + Package actorPkg = loadIfNot(export.getExport().getClassIndex().getOuterImportObject().getObjectName().getText()); + if (actorPkg != null) { for (UExport actorExp : actorPkg.getExports()) { if (actorExp.getExportType().endsWith("StaticMeshComponent")) { mesh = getProp(actorExp, "StaticMesh", FPackageIndex.class); @@ -193,280 +181,181 @@ private static JsonArray exportAndProduceProcessed(DefaultFileProvider provider, } } } + // endregion + + JsonObject matsObj = new JsonObject(); + JsonArray textureDataArr = new JsonArray(); + List materials = new ArrayList<>(); if (mesh != null && mesh.getIndex() != 0) { - meshS = mesh.getOuterImportObject().getObjectName().getText(); - /*String fixed = provider.fixPath(meshS); - fixed = fixed.substring(0, fixed.lastIndexOf('.')); - String finalFixed = fixed; - List matches = new ArrayList<>(MapsKt.filter(provider.getFiles(), entry -> entry.getKey().startsWith(finalFixed)).values()); - matches.sort((o1, o2) -> o2.getPathWithoutExtension().length() - o1.getPathWithoutExtension().length()); - if (matches.size() > 1) - System.err.println("More than 1 matches: " + Arrays.toString(matches.toArray())); - meshS = matches.get(0).getPathWithoutExtension().replace("FortniteGame/", "/Game/");*/ - - Package meshPkg = loadIfNot(provider, meshS = fix(exportType, meshS)); - - if (meshPkg != null) { - for (UExport meshExport : meshPkg.getExports()) { - if (meshExport.getExportType().equals("StaticMesh")) { - UScriptArray staticMaterials = getProp(meshExport, "StaticMaterials", UScriptArray.class); - - if (staticMaterials != null) { - for (FPropertyTagType staticMaterial : staticMaterials.getContents()) { - if ((meshMat = getProp(((FStructFallback) staticMaterial.getTagTypeValue()).getProperties(), "MaterialInterface", FPackageIndex.class)) != null) { - break; + toExport.add(meshS = mesh.getOuterImportObject().getObjectName().getText()); + + if (config.bReadMaterials) { + Package meshPkg = loadIfNot(meshS); + + if (meshPkg != null) { + for (UExport meshExport : meshPkg.getExports()) { + if (meshExport.getExportType().equals("StaticMesh")) { + //ExportStaticMeshKt.export(StaticMeshesKt.convertMesh((UStaticMesh) meshExport)).writeToDir(new File("TestExportMesh/" + meshS.substring(1)).getParentFile()); + FStructFallback[] staticMaterials = getProp(meshExport, "StaticMaterials", FStructFallback[].class); + + if (staticMaterials != null) { + for (FStructFallback staticMaterial : staticMaterials) { + materials.add(new Mat(getProp(staticMaterial.getProperties(), "MaterialInterface", FPackageIndex.class))); } } - } - } - if (meshMat != null) { - break; + break; + } } } - - toExport.add(meshS); } } - FPackageIndex overrideMats = getProp(refSMC, "OverrideMaterials", FPackageIndex.class); - String matToUse = overrideMats != null && overrideMats.getIndex() != 0 ? overrideMats.getOuterImportObject().getObjectName().getText() : meshMat != null && meshMat.getIndex() != 0 ? 
meshMat.getOuterImportObject().getObjectName().getText() : null; -// String[] matTex = new String[4]; - - if (matToUse != null) { - /*Package matPkg = loadIfNot(provider, matToUse = fix(null, matToUse)); - UExport matFirstExp = matPkg.getExports().get(0); - UScriptArray textureParameterValues = getProp(matFirstExp, "TextureParameterValues", UScriptArray.class); - - for (FPropertyTagType textureParameterValue : textureParameterValues.getContents()) { - FStructFallback textureParameterValueS = (FStructFallback) textureParameterValue.getTagTypeValue(); - String name = getProp(getProp(textureParameterValueS.getProperties(), "ParameterInfo", FStructFallback.class).getProperties(), "Name", FName.class).getText(); - - if (name != null && (name.equals("Diffuse") || name.equals("SpecularMasks") || name.equals("Normals"))) { - String texPath = getProp(textureParameterValueS.getProperties(), "ParameterValue", FPackageIndex.class).getOuterImportObject().getObjectName().getText(); - - if (name.equals("Diffuse")) { - matTex[0] = texPath; - } else if (name.equals("Normals")) { - matTex[1] = texPath; - } else if (name.equals("SpecularMasks")) { - matTex[2] = texPath; - } else if (name.equals("EmissiveTexture")) { - matTex[3] = texPath; + if (config.bReadMaterials) { + FPackageIndex material = getProp(refSMC, "BaseMaterial", FPackageIndex.class); + FPackageIndex[] overrideMaterials = getProp(export, "OverrideMaterials", FPackageIndex[].class); + + for (FPackageIndex textureData : getProps(export.getBaseObject().getProperties(), "TextureData", FPackageIndex.class)) { + if (textureData != null && textureData.getIndex() != 0) { + String textureDataPath = textureData.getOuterImportObject().getObjectName().getText(); + Package texDataPkg = loadIfNot(textureDataPath); + + if (texDataPkg != null) { + BuildingTextureData td = StructFallbackReflectionUtilKt.mapToClass(texDataPkg.getExports().get(0).getBaseObject(), BuildingTextureData.class, null); + JsonArray textures = new JsonArray(); + textures.add(td.Diffuse != null && td.Diffuse.getIndex() != 0 ? td.Diffuse.getOuterImportObject().getObjectName().getText() : null); + textures.add(td.Normal != null && td.Normal.getIndex() != 0 ? td.Normal.getOuterImportObject().getObjectName().getText() : null); + textures.add(td.Specular != null && td.Specular.getIndex() != 0 ? td.Specular.getOuterImportObject().getObjectName().getText() : null); + textures.add(td.Emissive != null && td.Emissive.getIndex() != 0 ? td.Emissive.getOuterImportObject().getObjectName().getText() : null); + textures.add(td.Mask != null && td.Mask.getIndex() != 0 ? td.Mask.getOuterImportObject().getObjectName().getText() : null); + JsonArray entry = new JsonArray(); + entry.add(textureDataPath); + entry.add(textures); + textureDataArr.add(entry); + + if (td.OverrideMaterial != null && td.OverrideMaterial.getIndex() != 0) { + material = td.OverrideMaterial; + } } - - toExport.add(texPath);*/ - toExport.add(matToUse); -// } -// } - } - - JsonArray additional = new JsonArray(); - UScriptArray additionalWorlds = getProp(export, "AdditionalWorlds", UScriptArray.class); - - if (additionalWorlds != null) { - for (FPropertyTagType additionalWorld : additionalWorlds.getContents()) { - FSoftObjectPath additionalWorldS = (FSoftObjectPath) additionalWorld.getTagTypeValue(); - String text = additionalWorldS.getAssetPathName().getText(); - JsonArray result = exportAndProduceProcessed(provider, StringsKt.substringBeforeLast(text, '.', text) + ".umap"); - additional.add(result != null ? 
result : new JsonNull()); + } else { + textureDataArr.add((JsonElement) null); + } } - } - - FVector loc = getProp(refSMC, "RelativeLocation", FVector.class); - FRotator rot = getProp(refSMC, "RelativeRotation", FRotator.class); - FVector sc = getProp(refSMC, "RelativeScale3D", FVector.class); - - obj.add(mesh != null ? meshS : null); - obj.add(matToUse); - obj.add(JWPSerializer.GSON.toJsonTree(new String[4])); - obj.add(loc != null ? vector(loc) : null); - obj.add(rot != null ? rotator(rot) : null); - obj.add(sc != null ? vector(sc) : null); - obj.add(additional); - } - return bruh; - } + for (int i = 0; i < materials.size(); i++) { + Mat mat = materials.get(i); - private static void resolveMaterials(DefaultFileProvider provider, JsonArray array) { - for (JsonElement entry : array) { - JsonArray entry1 = entry.getAsJsonArray(); - JsonElement mat = entry1.get(3); - JsonElement children = entry1.get(8); - - if (mat.isJsonPrimitive()) { - entry1.set(4, JWPSerializer.GSON.toJsonTree(MapsKt.getOrPut(parsedMaterials, mat.getAsString(), () -> { - File matFile = new File(fix(null, mat.getAsString().substring(1)) + ".mat"); - String[] matTex = new String[4]; - - try { - for (String s1 : Files.readAllLines(matFile.toPath())) { - String[] split = s1.split("="); - - if (split.length > 1) { - String assign = split[1].toLowerCase() + ".ubulk"; - List> filtered = provider.getFiles().entrySet().stream().filter(entry2 -> entry2.getKey().contains(assign)).collect(Collectors.toList()); - - if (!filtered.isEmpty()) { - String full = '/' + filtered.get(0).getValue().getPathWithoutExtension().replace("FortniteGame/Content", "Game"); - - switch (split[0]) { - case "Diffuse": - matTex[0] = full; - break; - case "Normal": - matTex[1] = full; - break; - case "Specular": - matTex[2] = full; - break; - case "Emissive": // emissive broke - // matTex[3] = full; - break; - } - } - } - } - } catch (IOException e) { - // throw new RuntimeException("Failed when reading material", e); - warn("Material failed to load: " + matFile); + if (material != null) { + mat.name = overrideMaterials != null && i < overrideMaterials.length && overrideMaterials[i].getIndex() != 0 ? overrideMaterials[i] : material; } - return matTex; - }))); + mat.populateTextures(); + mat.addToObj(matsObj); + } } - if (children.isJsonArray()) { - for (JsonElement childEntry : children.getAsJsonArray()) { - resolveMaterials(provider, childEntry.getAsJsonArray()); + // region additional worlds + JsonArray children = new JsonArray(); + FSoftObjectPath[] additionalWorlds = getProp(export, "AdditionalWorlds", FSoftObjectPath[].class); + + if (additionalWorlds != null) { + for (FSoftObjectPath additionalWorld : additionalWorlds) { + String text = additionalWorld.getAssetPathName().getText(); + children.add(exportAndProduceProcessed(StringsKt.substringBeforeLast(text, '.', text) + ".umap")); } } + // endregion + + comp.add(mesh != null && mesh.getIndex() != 0 ? 
meshS : null); + comp.add(matsObj); + comp.add(textureDataArr); + comp.add(vector(getProp(refSMC, "RelativeLocation", FVector.class))); + comp.add(rotator(getProp(refSMC, "RelativeRotation", FRotator.class))); + comp.add(vector(getProp(refSMC, "RelativeScale3D", FVector.class))); + comp.add(children); } + + return comps; } - private static Package loadIfNot(DefaultFileProvider provider, String pkg) { + private static Package loadIfNot(String pkg) { GameFile gameFile = provider.findGameFile(pkg); if (gameFile != null) { - return loadIfNot(provider, gameFile); + return loadIfNot(gameFile); } else { - warn("Requested package " + pkg + " was not found"); + LOGGER.warn("Package " + pkg + " not found"); return null; } } - private static Package loadIfNot(DefaultFileProvider provider, GameFile pkg) { - return MapsKt.getOrPut(loaded, pkg, () -> provider.loadGameFile(pkg)); - } + private static Package loadIfNot(GameFile pkg) { + return MapsKt.getOrPut(loaded, pkg, () -> { + LOGGER.info("Loading " + pkg); + Package loadedPkg = provider.loadGameFile(pkg); - @NotNull - private static String fix(String exportType, String fixS) { - if (fixS.endsWith("WildWest_RockingChair")) { - fixS += "_1"; - } else if (fixS.endsWith("Medium_Tree")) { - fixS += "_2"; - } else if (fixS.endsWith("Treasure_Chest")) { - fixS += "_2"; - } else if (fixS.endsWith("M_Rural_Garage")) { - fixS += "_1"; - } else if ("Prop_WildWest_SimpleChair_01_C".equals(exportType)) { - fixS += "_1"; - } else if ("Prop_WildWest_SimpleChair_02_C".equals(exportType)) { - fixS += "_2"; - } else if ("Prop_WildWest_SimpleChair_03_C".equals(exportType)) { - fixS += "_3"; - } else if ("Garage_Door_01_C".equals(exportType)) { - fixS += "_1"; - } else if ("Apollo_Fac_Pipe_S_128_C".equals(exportType)) { - fixS += "_128"; - } else if ("Apollo_Fac_Pipe_S_256_C".equals(exportType)) { - fixS += "_256"; - } else if ("Apollo_Fac_Pipe_S_512_C".equals(exportType)) { - fixS += "_512"; - } else if ("CornField_Rectangle_C".equals(exportType)) { - fixS += "_2"; - } + if (loadedPkg != null && config.bDumpAssets) { + File file = new File(jsonsFolder, pkg.getPathWithoutExtension() + ".json"); + LOGGER.info("Writing JSON to " + file.getAbsolutePath()); + file.getParentFile().mkdirs(); - return fixS; - } + try (FileWriter writer = new FileWriter(file)) { + GSON.toJson(loadedPkg.getExports(), writer); + } catch (IOException e) { + LOGGER.error("Writing failed", e); + } + } - /*public static void main(String[] args) throws Exception { - meshesSet.add("/Game/Packages/Fortress_Roofs/Meshes/SM_Fort_Roofs_Generic01"); - exportMeshes(); - }*/ + return loadedPkg; + }); + } private static void exportUmodel() throws InterruptedException, IOException { - for (List chunk : CollectionsKt.chunked(toExport, 250)) { - List parts = new ArrayList<>(); - parts.add("umodel"); - h(parts, "-path=\"" + gamePath + '\"'); - parts.add("-game=ue" + (gameVersion == Ue4Version.GAME_UE4_22 ? "4.22" : gameVersion == Ue4Version.GAME_UE4_23 ? "4.23" : gameVersion == Ue4Version.GAME_UE4_24 ? "4.24" : gameVersion == Ue4Version.GAME_UE4_25 ? "4.25" : /* Fortnite 12.61 */ "4.24")); - parts.add("-aes=" + (aes.startsWith("0x") ? aes : "0x" + aes)); - h(parts, "-out=\"" + new File("").getAbsolutePath() + '\"'); - - if (useGltf) { - parts.add("-gltf"); + try (PrintWriter pw = new PrintWriter("umodel_cmd.txt")) { + pw.println("-path=\"" + config.PaksDirectory + '\"'); + pw.println("-game=ue4." 
+ GameKt.GAME_UE4_GET_MINOR(config.UEVersion.getGame())); + + if (config.EncryptionKeys.length > 0) { + pw.println("-aes=0x" + ByteArrayUtils.encode(config.EncryptionKeys[0].Key)); + } + + pw.println("-out=\"" + new File("").getAbsolutePath() + '\"'); + + if (!isEmpty(config.UModelAdditionalArgs)) { + pw.println(config.UModelAdditionalArgs); } boolean bFirst = true; - for (String export : chunk) { + for (String export : toExport) { if (bFirst) { bFirst = false; - parts.add("-export"); - parts.add(export); + pw.println("-export"); + pw.println(export); } else { - parts.add("-pkg=" + export); + pw.println("-pkg=" + export); } } - - System.out.println("Invoking UModel: " + CollectionsKt.joinToString(parts, " ", "", "", -1, "...", null)); - ProcessBuilder pb = new ProcessBuilder(parts); - pb.redirectOutput(ProcessBuilder.Redirect.INHERIT); - pb.redirectError(ProcessBuilder.Redirect.INHERIT); - pb.start().waitFor(); - } - } - - private static void h(List command, String s) { - Collections.addAll(command, s.split(" ")); - } - - private static GameFile findBuildingActor(DefaultFileProvider provider, String actorName) { - if (actorName.endsWith("_C")) { - actorName = actorName.substring(0, actorName.length() - 2); - } - - String check = '/' + actorName.toLowerCase() + ".uasset"; - List> filtered = provider.getFiles().entrySet().stream().filter(entry -> entry.getKey().endsWith(check)).collect(Collectors.toList()); - - if (filtered.size() == 1) { - return filtered.get(0).getValue(); } - filtered = filtered.stream().filter(entry -> entry.getKey().contains("Actor".toLowerCase())).collect(Collectors.toList()); // keys are lower cased + ProcessBuilder pb = new ProcessBuilder(Arrays.asList("umodel", "@umodel_cmd.txt")); + pb.redirectOutput(ProcessBuilder.Redirect.INHERIT); + pb.redirectError(ProcessBuilder.Redirect.INHERIT); + LOGGER.info("Starting UModel process"); + int exitCode = pb.start().waitFor(); - if (!filtered.isEmpty()) { - GameFile out = filtered.get(0).getValue(); - - if (filtered.size() > 1) { - warn(actorName + ": Found " + filtered.size() + " actors. Picked the first one: " + out); - } - - return out; + if (exitCode == 0) { + toExport.clear(); + } else { + LOGGER.warn("UModel returned exit code " + exitCode + ", some assets might weren't exported successfully"); } - - warn("Actor not found: " + actorName); - return null; } private static T getProp(List properties, String name, Class clazz) { for (FPropertyTag prop : properties) { if (name.equals(prop.getName().getText())) { - Object tagTypeValue = prop.getTagTypeValue(); - return clazz.isInstance(tagTypeValue) ? 
(T) tagTypeValue : null; + return (T) prop.getTagTypeValue(clazz, null); } } @@ -474,12 +363,27 @@ private static T getProp(List properties, String name, Class T getProp(UExport export, String name, Class clazz) { - if (export instanceof UObject) { - return getProp(((UObject) export).getProperties(), name, clazz); - } else { - System.out.println("Skipping " + export.getExportType()); - return null; + return getProp(export.getBaseObject().getProperties(), name, clazz); + } + + public static T[] getProps(List properties, String name, Class clazz) { + List collected = new ArrayList<>(); + int maxIndex = -1; + + for (FPropertyTag prop : properties) { + if (prop.getName().getText().equals(name)) { + collected.add(prop); + maxIndex = Math.max(maxIndex, prop.getArrayIndex()); + } + } + + T[] out = (T[]) Array.newInstance(clazz, maxIndex + 1); + + for (FPropertyTag prop : collected) { + out[prop.getArrayIndex()] = (T) prop.getTagTypeValue(clazz, null); } + + return out; } private static String guidAsString(FGuid guid) { @@ -487,6 +391,7 @@ private static String guidAsString(FGuid guid) { } private static JsonArray vector(FVector vector) { + if (vector == null) return null; JsonArray array = new JsonArray(3); array.add(vector.getX()); array.add(vector.getY()); @@ -495,6 +400,7 @@ private static JsonArray vector(FVector vector) { } private static JsonArray rotator(FRotator rotator) { + if (rotator == null) return null; JsonArray array = new JsonArray(3); array.add(rotator.getPitch()); array.add(rotator.getYaw()); @@ -502,8 +408,133 @@ private static JsonArray rotator(FRotator rotator) { return array; } - public static void warn(String message) { - System.out.println("WARNING: " + message); - warnings.add(message); + private static boolean isEmpty(String s) { + return s == null || s.isEmpty(); + } + + private static class Mat { + public FPackageIndex name; + public Map textureMap = new HashMap<>(); + + public Mat(FPackageIndex name) { + this.name = name; + } + + public void populateTextures() { + populateTextures(name); + } + + public void populateTextures(FPackageIndex pkgIndex) { + if (pkgIndex.getIndex() == 0) return; + Package matPkg = loadIfNot(pkgIndex.getOuterImportObject().getObjectName().getText()); + if (matPkg == null) return; + UExport matFirstExp = matPkg.getExports().get(0); + FStructFallback[] textureParameterValues = getProp(matFirstExp, "TextureParameterValues", FStructFallback[].class); + + if (textureParameterValues != null) { + for (FStructFallback textureParameterValue : textureParameterValues) { + FName name = getProp(getProp(textureParameterValue.getProperties(), "ParameterInfo", FStructFallback.class).getProperties(), "Name", FName.class); + + if (name != null) { + FPackageIndex parameterValue = getProp(textureParameterValue.getProperties(), "ParameterValue", FPackageIndex.class); + + if (parameterValue != null && parameterValue.getIndex() != 0 && !textureMap.containsKey(name.getText())) { + textureMap.put(name.getText(), parameterValue.getOuterImportObject().getObjectName().getText()); + } + } + } + } + + FPackageIndex parent = getProp(matFirstExp, "Parent", FPackageIndex.class); + + if (parent != null && parent.getIndex() != 0) { + populateTextures(parent); + } + } + + public void addToObj(JsonObject obj) { + String[][] textures = { // d n s e a + { + textureMap.getOrDefault("Trunk_BaseColor", textureMap.get("Diffuse")), + textureMap.getOrDefault("Trunk_Normal", textureMap.get("Normals")), + textureMap.getOrDefault("Trunk_Specular", textureMap.get("SpecularMasks")), + 
textureMap.get("EmissiveTexture"), + textureMap.get("MaskTexture") + }, + { + textureMap.get("Diffuse_Texture_3"), + textureMap.get("Normals_Texture_3"), + textureMap.get("SpecularMasks_3"), + textureMap.get("EmissiveTexture_3"), + textureMap.get("MaskTexture_3") + }, + { + textureMap.get("Diffuse_Texture_4"), + textureMap.get("Normals_Texture_4"), + textureMap.get("SpecularMasks_4"), + textureMap.get("EmissiveTexture_4"), + textureMap.get("MaskTexture_4") + }, + { + textureMap.get("Diffuse_Texture_2"), + textureMap.get("Normals_Texture_2"), + textureMap.get("SpecularMasks_2"), + textureMap.get("EmissiveTexture_2"), + textureMap.get("MaskTexture_2") + } + }; + + for (int i = 0; i < textures.length; i++) { + boolean empty = true; + + for (int j = 0; j < textures[i].length; j++) { + empty &= textures[i][j] == null; + + if (textures[i][j] != null) { + toExport.add(textures[i][j]); + } + } + + if (empty) { + textures[i] = new String[0]; + } + } + + obj.add(name.getIndex() != 0 ? name.getOuterImportObject().getObjectName().getText() : Integer.toHexString(hashCode()), GSON.toJsonTree(textures)); + } + } + + private static class BuildingTextureData { + public FPackageIndex Diffuse; + public FPackageIndex Normal; + public FPackageIndex Specular; + public FPackageIndex Emissive; + public FPackageIndex Mask; + public FPackageIndex OverrideMaterial; + // public EFortResourceType ResourceType; + // public Float ResourceCost; + } + + private static class Config { + public String PaksDirectory = "C:\\Program Files\\Epic Games\\Fortnite\\FortniteGame\\Content\\Paks"; + public Ue4Version UEVersion = Ue4Version.GAME_UE4_LATEST; + public EncryptionKey[] EncryptionKeys = {}; + public boolean bReadMaterials = false; + public boolean bRunUModel = true; + public String UModelAdditionalArgs = ""; + public boolean bDumpAssets = false; + public String ExportPackage; + + private static class EncryptionKey { + public FGuid Guid = FGuid.Companion.getMainGuid(); + public String FileName; + public byte[] Key = {}; + } + } + + private static class MainException extends Exception { + public MainException(String message) { + super(message); + } } } diff --git a/src/main/resources/config.json b/src/main/resources/config.json new file mode 100644 index 0000000..eb1fcd9 --- /dev/null +++ b/src/main/resources/config.json @@ -0,0 +1,20 @@ +{ + "_Documentation": "https://github.com/Amrsatrio/BlenderUmap/blob/master/README.md", + "PaksDirectory": "C:\\Program Files\\Epic Games\\Fortnite\\FortniteGame\\Content\\Paks", + "UEVersion": "GAME_UE4_25", + "EncryptionKeys": [ + { + "Guid": "00000000000000000000000000000000", + "Key": "0x2ba4708c17abf803ab821c0c89fa1cf3dfe7aa91d526d2e11526bc1ac4e34d13" + }, + { + "FileName": "pakchunk1006-WindowsClient.pak", + "Key": "0x7dec1e6b26ce85b7680555f97064ceaa5c788dfdc674f98a6a711f726dedb943" + } + ], + "bReadMaterials": true, + "bRunUModel": true, + "UModelAdditionalArgs": "", + "bDumpAssets": false, + "ExportPackage": "/Game/Content/Athena/Apollo/Maps/Buildings/3x3/Apollo_3x3_BoatRental.umap" +} \ No newline at end of file diff --git a/src/main/resources/config.properties b/src/main/resources/config.properties deleted file mode 100644 index 8987d5a..0000000 --- a/src/main/resources/config.properties +++ /dev/null @@ -1,21 +0,0 @@ -# Path to the Paks folder. -gamePath=C:\\Program Files\\Epic Games\\Fortnite\\FortniteGame\\Content\\Paks - -# 0 for 4.22, 1 for 4.23, 2 for 4.24, 3 for 4.25 -gameVersion=2 - -# Game AES key in hex (0x is optional). 
-# For Fortnite, you can get it from https://fnbot.shop/api/aes -aes=3f3717f4f206ff21bda8d3bf62b323556d1d2e7d9b0f7abd572d3cfe5b569fac - -# Materials are experimental! No support for unique textures for basic building blocks yet. -readMaterials=false - -# Export meshes, materials, and textures with UModel automatically. -runUmodel=true - -# With GLTF you will have faster import times but no support (yet) for nested umaps. -useGltf=false - -# The .umap to export. -package=FortniteGame/Content/Athena/Apollo/Maps/Buildings/3x3/Apollo_3x3_BoatRental.umap \ No newline at end of file diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties deleted file mode 100644 index fab0d4b..0000000 --- a/src/main/resources/log4j.properties +++ /dev/null @@ -1,18 +0,0 @@ -log4j.rootLogger=ALL, console -#log4j.logger.JFortniteParse=ALL, error -#log4j.logger.Oodle=ALL, error -#log4j.logger.PakFile=ALL, console - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.out -log4j.appender.console.immediateFlush=true -log4j.appender.console.encoding=UTF-8 -log4j.appender.console.layout=org.apache.log4j.EnhancedPatternLayout -log4j.appender.console.layout.conversionPattern=[%d{yyyy.MM.dd-HH.mm.ss:SSS}{GMT}]%c: %p: %m%n - -log4j.appender.error=org.apache.log4j.ConsoleAppender -log4j.appender.error.target=System.err -log4j.appender.error.immediateFlush=true -log4j.appender.error.encoding=UTF-8 -log4j.appender.error.layout=org.apache.log4j.EnhancedPatternLayout -log4j.appender.error.layout.conversionPattern=[%d{yyyy.MM.dd-HH.mm.ss:SSS}{GMT}]%c: %p: %m%n \ No newline at end of file diff --git a/src/main/resources/log4j2.xml b/src/main/resources/log4j2.xml new file mode 100644 index 0000000..a7aa74a --- /dev/null +++ b/src/main/resources/log4j2.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/umap.py b/umap.py index 9cb7701..8dee29c 100644 --- a/umap.py +++ b/umap.py @@ -1,284 +1,428 @@ """ +BlenderUmap v0.2.0 (C) 2020 amrsatrio. All rights reserved. """ +import bpy +import json +import os +import time +from math import * # Change the value to the working directory of the Java program with the bat. I'm leaving mine here. -# Must end with the path separator (\\ on Windows, / on *nix) or it will fail. -data_dir = "C:\\Users\\satri\\Documents\\AppProjects\\BlenderUmap\\run\\" +data_dir = r"C:\Users\satri\Documents\AppProjects\BlenderUmap\run" -# Wondering what makes stuff so long? Or if something weren't right? Flip this to True. 
+reuse_meshes = True +use_cube_as_fallback = True +use_gltf = False verbose = True -use_gltf = False # ---------- END INPUTS, DO NOT MODIFY ANYTHING BELOW UNLESS YOU NEED TO ---------- -import bpy -import json -import os -import time -from math import * -from mathutils import Vector - -all_meshes = {} - - -def import_umap(comps, attach_parent=None): - for comp_i, comp in enumerate(comps): - guid = comp[0] - export_type = comp[1] - mesh = comp[2] - mat = comp[3] - textures = comp[4] - comp_location = comp[5] or [0, 0, 0] - comp_rotation = comp[6] or [0, 0, 0] - comp_scale = comp[7] or [1, 1, 1] - child_comps = comp[8] - - name = export_type + ("" if guid is None else ("_" + guid[:4])) - print("Actor " + str(comp_i + 1) + " of " + str(len(comps)) + ": " + name) - - if child_comps is not None and len(child_comps) > 0: - bpy.ops.mesh.primitive_plane_add(size=100) - bpy.context.selected_objects[0].data = bpy.data.meshes["__empty"] - elif mesh is None: - print("WARNING: No mesh, defaulting to cube") - cube() - else: - if mesh.startswith("/"): mesh = mesh[1:] - - key = mesh + ":" + str(mat) - existing_mesh = all_meshes.get(key) - - if existing_mesh is not None: - if verbose: print("Using existing mesh") - bpy.ops.mesh.primitive_plane_add(size=100) - bpy.context.selected_objects[0].data = bpy.data.meshes[existing_mesh] - else: - mesh_import_result = None - - if use_gltf: - if os.path.exists(os.path.join(data_dir, mesh + ".gltf")): - mesh += ".gltf" - final_dir = os.path.join(data_dir, mesh) - if verbose: print("Mesh:", final_dir) - if os.path.exists(final_dir): - mesh_import_result = bpy.ops.import_scene.gltf(filepath=final_dir) - else: - print("WARNING: Mesh not found, defaulting to cube") - cube() - else: - if os.path.exists(os.path.join(data_dir, mesh + ".psk")): - mesh += ".psk" - elif os.path.exists(os.path.join(data_dir, mesh + ".pskx")): - mesh += ".pskx" - final_dir = os.path.join(data_dir, mesh) - if verbose: print("Mesh:", final_dir) - if os.path.exists(final_dir): - mesh_import_result = bpy.ops.import_scene.psk(bReorientBones=True, directory=data_dir, files=[{"name": mesh}]) - else: - print("WARNING: Mesh not found, defaulting to cube") - cube() - - if mesh_import_result == {"FINISHED"}: - if verbose: print("Mesh imported") - bpy.ops.object.shade_smooth() - else: - print("WARNING: Failure importing mesh, defaulting to cube") - cube() - - if mat is not None: - import_and_apply_material(os.path.join(data_dir, mat[1:] + ".mat"), textures, True) - - all_meshes[key] = bpy.context.selected_objects[0].data.name - - created = bpy.context.selected_objects[0] - created.name = name - - if verbose: print("Applying transformation properties") - created.location = [comp_location[0] * 0.01, - comp_location[1] * 0.01 * -1, - comp_location[2] * 0.01] - created.rotation_mode = "XYZ" - created.rotation_euler = [radians(comp_rotation[2] + (90 if use_gltf else 0)), - radians(comp_rotation[0] * -1), - radians(comp_rotation[1] * -1)] - created.scale = comp_scale - - if attach_parent is not None: - if verbose: print("Attaching to parent", attach_parent.name) - created.parent = attach_parent - - if child_comps is not None: - if use_gltf: - print("Nested worlds aren't supported yet with GLTF") - else: - for child_comp in child_comps: - import_umap(child_comp, created) - - print("") - - -# credit Lucas7yoshi -def import_and_apply_material(dot_mat_path, textures, apply_to_selected): - # make the material - outputMaterialName = os.path.basename(dot_mat_path) - - if outputMaterialName in bpy.data.materials: - if 
apply_to_selected: - bpy.context.active_object.data.materials[0] = bpy.data.materials[outputMaterialName] - return - - mat = bpy.data.materials.new(name=outputMaterialName) - mat.use_nodes = True - materialOutput = mat.node_tree.nodes.get('Material Output') - principleBSDF = mat.node_tree.nodes.get('Principled BSDF') - mat.node_tree.links.remove(principleBSDF.outputs[0].links[0]) # remove inital link - - addShader = mat.node_tree.nodes.new("ShaderNodeAddShader") - mat.node_tree.links.new(principleBSDF.outputs[0], addShader.inputs[0]) - mat.node_tree.links.new(addShader.outputs[0], materialOutput.inputs[0]) - addShader.location = Vector((400, -250)) - materialOutput.location = Vector((650, -250)) - - if textures[0] is not None: # diffuse - diffuseImgPath = os.path.join(data_dir, textures[0][1:] + ".tga") - if verbose: print(diffuseImgPath) - - if os.path.exists(diffuseImgPath): - diffuseTex = mat.node_tree.nodes.new("ShaderNodeTexImage") - diffuseImg = bpy.data.images.load(filepath=diffuseImgPath) - diffuseTex.image = diffuseImg - diffuseTex.location = Vector((-400, 450)) - # diffuseTex.hide = True - # connect diffuseTexture to principle - mat.node_tree.links.new(diffuseTex.outputs[0], principleBSDF.inputs[0]) - else: - print("WARNING: " + diffuseImgPath + " not found") - - if textures[1] is not None: # normal - normalImgPath = os.path.join(data_dir, textures[1][1:] + ".tga") - if verbose: print(normalImgPath) - - if os.path.exists(normalImgPath): - normY = -125 - - normTex = mat.node_tree.nodes.new("ShaderNodeTexImage") - normCurve = mat.node_tree.nodes.new("ShaderNodeRGBCurve") - normMap = mat.node_tree.nodes.new("ShaderNodeNormalMap") - normImage = bpy.data.images.load(filepath=normalImgPath) - # location crap - normTex.location = Vector((-800, normY)) - normCurve.location = Vector((-500, normY)) - normMap.location = Vector((-200, normY)) - - normImage.colorspace_settings.name = 'Non-Color' - normTex.image = normImage - # normTex.hide = True - # setup rgb curve - normCurve.mapping.curves[1].points[0].location = (0, 1) - normCurve.mapping.curves[1].points[1].location = (1, 0) - # connect everything - mat.node_tree.links.new(normTex.outputs[0], normCurve.inputs[1]) - mat.node_tree.links.new(normCurve.outputs[0], normMap.inputs[1]) - mat.node_tree.links.new(normMap.outputs[0], principleBSDF.inputs['Normal']) - else: - print("WARNING: " + normalImgPath + " not found") - - if textures[2] is not None: # specular - specularImgPath = os.path.join(data_dir, textures[2][1:] + ".tga") - if verbose: print(specularImgPath) - - if os.path.exists(specularImgPath): - specY = 140 - - specTex = mat.node_tree.nodes.new("ShaderNodeTexImage") - - specSeperateRGB = mat.node_tree.nodes.new("ShaderNodeSeparateRGB") - specSeperateRGB.location = Vector((-250, specY)) - # specSeperateRGB.hide = True - - specImage = bpy.data.images.load(filepath=specularImgPath) - specImage.colorspace_settings.name = 'Non-Color' - - specTex.image = specImage - specTex.location = Vector((-600, specY)) - # specTex.hide = True - # connect spec texture to rgb split - mat.node_tree.links.new(specTex.outputs[0], specSeperateRGB.inputs[0]) - # connect rgb splits to principle - mat.node_tree.links.new(specSeperateRGB.outputs[0], principleBSDF.inputs['Specular']) - mat.node_tree.links.new(specSeperateRGB.outputs[1], principleBSDF.inputs['Metallic']) - mat.node_tree.links.new(specSeperateRGB.outputs[2], principleBSDF.inputs['Roughness']) - else: - print("WARNING: " + specularImgPath + " not found") - - if textures[3] is not None: # emission 
- emissiveImgPath = os.path.join(data_dir, textures[3][1:] + ".tga") - if verbose: print(emissiveImgPath) - - if os.path.exists(emissiveImgPath): - emiTex = mat.node_tree.nodes.new("ShaderNodeTexImage") - emiShader = mat.node_tree.nodes.new("ShaderNodeEmission") - emiImage = bpy.data.images.load(filepath=emissiveImgPath) - emiTex.image = emiImage - # emission - location - emiTex.location = Vector((-200, -425)) - emiShader.location = Vector((100, -425)) - # connecting - mat.node_tree.links.new(emiTex.outputs[0], emiShader.inputs[0]) - mat.node_tree.links.new(emiShader.outputs[0], addShader.inputs[1]) - else: - print("WARNING: " + emissiveImgPath + " not found") - - if apply_to_selected: - bpy.context.active_object.data.materials[0] = mat - - print("Material imported") - - -def cube(): - bpy.ops.mesh.primitive_cube_add(size=100) - bpy.context.selected_objects[0].data = bpy.data.meshes["__fallback"] +def import_umap(comps: list, attach_parent: bpy.types.Object = None) -> None: + for comp_i, comp in enumerate(comps): + guid = comp[0] + export_type = comp[1] + mesh_path = comp[2] + mats = comp[3] + texture_data = comp[4] + location = comp[5] or [0, 0, 0] + rotation = comp[6] or [0, 0, 0] + scale = comp[7] or [1, 1, 1] + child_comps = comp[8] + + name = export_type + (("_" + guid[:8]) if guid else "") + print("\nActor %d of %d: %s" % (comp_i + 1, len(comps), name)) + + if child_comps and len(child_comps) > 0: + bpy.ops.mesh.primitive_plane_add(size=1) + bpy.context.active_object.data = bpy.data.meshes["__empty"] + elif not mesh_path: + print("WARNING: No mesh, defaulting to fallback mesh") + fallback() + else: + if mesh_path.startswith("/"): + mesh_path = mesh_path[1:] + + key = os.path.basename(mesh_path) + td_suffix = "" + + if mats and len(mats) > 0: + key += "_{:08x}".format(abs(string_hash_code(";".join(mats.keys())))) + if texture_data and len(texture_data) > 0: + td_suffix = "_{:08x}".format(abs(string_hash_code(";".join([it[0] if it else "" for it in texture_data])))) + key += td_suffix + + existing_mesh = bpy.data.meshes.get(key) if reuse_meshes else None + + if existing_mesh: + if verbose: + print("Using existing mesh") + bpy.ops.mesh.primitive_plane_add(size=1) + bpy.context.active_object.data = existing_mesh + else: + mesh_import_result = None + + if use_gltf: + final_dir = os.path.join(data_dir, mesh_path + ".gltf") + if verbose: + print("Mesh:", final_dir) + if os.path.exists(final_dir): + mesh_import_result = bpy.ops.import_scene.gltf(filepath=final_dir) + else: + print("WARNING: Mesh not found, defaulting to fallback mesh") + fallback() + else: + final_dir = os.path.join(data_dir, mesh_path) + mesh_path_ = mesh_path + if os.path.exists(final_dir + ".psk"): + final_dir += ".psk" + mesh_path_ += ".psk" + elif os.path.exists(final_dir + ".pskx"): + final_dir += ".pskx" + mesh_path_ += ".pskx" + if verbose: + print("Mesh:", final_dir) + if os.path.exists(final_dir): + mesh_import_result = bpy.ops.import_scene.psk(bReorientBones=True, directory=data_dir, files=[{"name": mesh_path_}]) + else: + print("WARNING: Mesh not found, defaulting to fallback mesh") + fallback() + + if mesh_import_result == {"FINISHED"}: + if verbose: + print("Mesh imported") + bpy.context.active_object.data.name = key + bpy.ops.object.shade_smooth() + + for m_idx, (m_path, m_textures) in enumerate(mats.items()): + import_material(m_idx, m_path, td_suffix, m_textures, texture_data) + else: + print("WARNING: Failure importing mesh, defaulting to fallback mesh") + fallback() + + created = bpy.context.active_object 
+		created.name = name
+		created.location = [location[0] * 0.01, location[1] * 0.01 * -1, location[2] * 0.01]
+		created.rotation_mode = "XYZ"
+		created.rotation_euler = [radians(rotation[2] + (90 if use_gltf else 0)), radians(rotation[0] * -1), radians(rotation[1] * -1)]
+		created.scale = scale
+
+		if attach_parent:
+			print("Attaching to parent", attach_parent.name)
+			created.parent = attach_parent
+
+		if child_comps:
+			if use_gltf:
+				print("Nested worlds aren't supported yet with GLTF")
+			else:
+				for child_comp in child_comps:
+					import_umap(child_comp, created)
+
+
+def import_material(m_idx: int, path: str, suffix: str, base_textures: list, tex_data: dict) -> bpy.types.Material:
+	# the .mat suffix is required to avoid conflicts with the empty materials created by the PSK import plugin
+	m_name = os.path.basename(path + ".mat" + suffix)
+	m = bpy.data.materials.get(m_name)
+
+	if not m:
+		for td_idx, td_entry in enumerate(tex_data):
+			if not td_entry:
+				continue
+			# choose which base texture set this TextureData entry overrides (1 -> 3, 2 -> 2, others -> 0)
+			index = {1: 3, 2: 2}.get(td_idx, 0)
+			td_textures = td_entry[1]
+
+			for i, tex_entry in enumerate(base_textures[index]):
+				if i < len(td_textures) and td_textures[i]:
+					base_textures[index][i] = td_textures[i]
+
+		m = bpy.data.materials.new(name=m_name)
+		m.use_nodes = True
+		tree = m.node_tree
+
+		for node in tree.nodes:
+			tree.nodes.remove(node)
+
+		m.blend_method = "OPAQUE"
+
+		def group(sub_tex_idx, location):
+			sh = tree.nodes.new("ShaderNodeGroup")
+			sh.location = location
+			sh.node_tree = tex_shader
+			sub_textures = base_textures[sub_tex_idx] if sub_tex_idx < len(base_textures) and base_textures[sub_tex_idx] and len(base_textures[sub_tex_idx]) > 0 else base_textures[0]
+
+			for tex_index, sub_tex in enumerate(sub_textures):
+				if sub_tex:
+					img = get_or_load_img(sub_tex) if not sub_tex.endswith("/T_EmissiveColorChart") else None
+
+					if img:
+						d_tex = tree.nodes.new("ShaderNodeTexImage")
+						d_tex.hide = True
+						d_tex.location = [location[0] - 320, location[1] - tex_index * 40]
+
+						if tex_index != 0: # other than diffuse
+							img.colorspace_settings.name = "Non-Color"
+
+						d_tex.image = img
+						tree.links.new(d_tex.outputs[0], sh.inputs[tex_index])
+
+						if tex_index == 4: # change mat blend method if there's an alpha mask texture
+							m.blend_method = "HASHED"
+
+			return sh
+
+		mat_out = tree.nodes.new("ShaderNodeOutputMaterial")
+		mat_out.location = [300, 300]
+
+		if bpy.context.active_object.data.uv_layers.get("EXTRAUVS0"):
+			uvm_ng = tree.nodes.new("ShaderNodeGroup")
+			uvm_ng.location = [100, 300]
+			uvm_ng.node_tree = bpy.data.node_groups["UV Shader Mix"]
+			uv_map = tree.nodes.new("ShaderNodeUVMap")
+			uv_map.location = [-100, 700]
+			uv_map.uv_map = "EXTRAUVS0"
+			tree.links.new(uv_map.outputs[0], uvm_ng.inputs[0])
+			tree.links.new(group(0, [-100, 550]).outputs[0], uvm_ng.inputs[1])
+			tree.links.new(group(1, [-100, 300]).outputs[0], uvm_ng.inputs[2])
+			tree.links.new(group(2, [-100, 50]).outputs[0], uvm_ng.inputs[3])
+			tree.links.new(group(3, [-100, -200]).outputs[0], uvm_ng.inputs[4])
+			tree.links.new(uvm_ng.outputs[0], mat_out.inputs[0])
+		else:
+			tree.links.new(group(0, [100, 300]).outputs[0], mat_out.inputs[0])
+
+	print("Material imported")
+
+	if m_idx < len(bpy.context.active_object.data.materials):
+		bpy.context.active_object.data.materials[m_idx] = m
+
+	return m
+
+
+def fallback() -> None:
+	bpy.ops.mesh.primitive_plane_add(size=1)
+	bpy.context.active_object.data = bpy.data.meshes["__fallback" if use_cube_as_fallback else "__empty"]
+
+
+def get_or_load_img(img_path: str) -> bpy.types.Image:
+	img_path = os.path.join(data_dir, img_path[1:] + ".tga")
+	
existing = bpy.data.images.get(os.path.basename(img_path)) + + if existing: + return existing + elif os.path.exists(img_path): + if verbose: + print(img_path) + return bpy.data.images.load(filepath=img_path) + else: + print("WARNING: " + img_path + " not found") + return None + + +def cleanup() -> None: + for block in bpy.data.meshes: + if block.users == 0: + bpy.data.meshes.remove(block) + + for block in bpy.data.materials: + if block.users == 0: + bpy.data.materials.remove(block) + + for block in bpy.data.textures: + if block.users == 0: + bpy.data.textures.remove(block) + + for block in bpy.data.images: + if block.users == 0: + bpy.data.images.remove(block) + + +def string_hash_code(s: str) -> int: + h = 0 + for c in s: + h = (31 * h + ord(c)) & 0xFFFFFFFF + return ((h + 0x80000000) & 0xFFFFFFFF) - 0x80000000 start = int(time.time() * 1000.0) +# create UV shader mix node group, credits to @FriesFX +uvm = bpy.data.node_groups.get("UV Shader Mix") + +if not uvm: + uvm = bpy.data.node_groups.new(name="UV Shader Mix", type="ShaderNodeTree") + # for node in tex_shader.nodes: tex_shader.nodes.remove(node) + + mix_1 = uvm.nodes.new("ShaderNodeMixShader") + mix_2 = uvm.nodes.new("ShaderNodeMixShader") + mix_3 = uvm.nodes.new("ShaderNodeMixShader") + mix_4 = uvm.nodes.new("ShaderNodeMixShader") + mix_1.location = [-500, 300] + mix_2.location = [-300, 300] + mix_3.location = [-100, 300] + mix_4.location = [100, 300] + uvm.links.new(mix_1.outputs[0], mix_2.inputs[1]) + uvm.links.new(mix_2.outputs[0], mix_3.inputs[1]) + uvm.links.new(mix_3.outputs[0], mix_4.inputs[1]) + + x = -1700 + y = 700 + sep = uvm.nodes.new("ShaderNodeSeparateRGB") + sep.location = [x + 200, y - 200] + + m1_1 = uvm.nodes.new("ShaderNodeMath") + m1_2 = uvm.nodes.new("ShaderNodeMath") + m1_3 = uvm.nodes.new("ShaderNodeMath") + m1_1.location = [x + 400, y] + m1_2.location = [x + 400, y - 200] + m1_3.location = [x + 400, y - 400] + m1_1.operation = "LESS_THAN" + m1_2.operation = "LESS_THAN" + m1_3.operation = "LESS_THAN" + m1_1.inputs[1].default_value = 1.420 + m1_2.inputs[1].default_value = 1.720 + m1_3.inputs[1].default_value = 3.000 + uvm.links.new(sep.outputs[0], m1_1.inputs[0]) + uvm.links.new(sep.outputs[0], m1_2.inputs[0]) + uvm.links.new(sep.outputs[0], m1_3.inputs[0]) + + add_1_2 = uvm.nodes.new("ShaderNodeMath") + add_1_2.location = [x + 600, y - 300] + add_1_2.operation = "ADD" + uvm.links.new(m1_1.outputs[0], add_1_2.inputs[0]) + uvm.links.new(m1_2.outputs[0], add_1_2.inputs[1]) + + m2_1 = uvm.nodes.new("ShaderNodeMath") + m2_2 = uvm.nodes.new("ShaderNodeMath") + m2_3 = uvm.nodes.new("ShaderNodeMath") + m2_4 = uvm.nodes.new("ShaderNodeMath") + m2_1.location = [x + 800, y] + m2_2.location = [x + 800, y - 200] + m2_3.location = [x + 800, y - 400] + m2_4.location = [x + 800, y - 600] + m2_1.operation = "ADD" + m2_2.operation = "SUBTRACT" + m2_3.operation = "SUBTRACT" + m2_4.operation = "LESS_THAN" + m2_1.use_clamp = True + m2_2.use_clamp = True + m2_3.use_clamp = True + m2_4.use_clamp = True + m2_1.inputs[1].default_value = 0 + m2_4.inputs[1].default_value = 0.700 + uvm.links.new(m1_1.outputs[0], m2_1.inputs[0]) + uvm.links.new(m1_2.outputs[0], m2_2.inputs[0]) + uvm.links.new(m1_1.outputs[0], m2_2.inputs[1]) + uvm.links.new(m1_3.outputs[0], m2_3.inputs[0]) + uvm.links.new(add_1_2.outputs[0], m2_3.inputs[1]) + uvm.links.new(m1_3.outputs[0], m2_4.inputs[0]) + + uvm.links.new(m2_1.outputs[0], mix_1.inputs[0]) + uvm.links.new(m2_2.outputs[0], mix_4.inputs[0]) + uvm.links.new(m2_3.outputs[0], mix_2.inputs[0]) + 
uvm.links.new(m2_4.outputs[0], mix_3.inputs[0]) + + # I/O + g_in = uvm.nodes.new("NodeGroupInput") + g_out = uvm.nodes.new("NodeGroupOutput") + g_in.location = [-1700, 220] + g_out.location = [300, 300] + uvm.links.new(g_in.outputs[0], sep.inputs[0]) + uvm.links.new(g_in.outputs[1], mix_1.inputs[2]) + uvm.links.new(g_in.outputs[2], mix_2.inputs[2]) + uvm.links.new(g_in.outputs[3], mix_3.inputs[2]) + uvm.links.new(g_in.outputs[4], mix_4.inputs[2]) + uvm.links.new(mix_4.outputs[0], g_out.inputs[0]) + +# create texture shader node group, credit @Lucas7yoshi +tex_shader = bpy.data.node_groups.get("Texture Shader") + +if not tex_shader: + tex_shader = bpy.data.node_groups.new(name="Texture Shader", type="ShaderNodeTree") + # for node in tex_shader.nodes: tex_shader.nodes.remove(node) + + g_in = tex_shader.nodes.new("NodeGroupInput") + g_out = tex_shader.nodes.new("NodeGroupOutput") + g_in.location = [-700, 0] + g_out.location = [350, 300] + + principled_bsdf = tex_shader.nodes.new(type="ShaderNodeBsdfPrincipled") + principled_bsdf.location = [50, 300] + tex_shader.links.new(principled_bsdf.outputs[0], g_out.inputs[0]) + + # diffuse + tex_shader.links.new(g_in.outputs[0], principled_bsdf.inputs["Base Color"]) + + # normal + norm_y = -1 + norm_curve = tex_shader.nodes.new("ShaderNodeRGBCurve") + norm_map = tex_shader.nodes.new("ShaderNodeNormalMap") + norm_curve.location = [-500, norm_y] + norm_map.location = [-200, norm_y] + norm_curve.mapping.curves[1].points[0].location = [0, 1] + norm_curve.mapping.curves[1].points[1].location = [1, 0] + tex_shader.links.new(g_in.outputs[1], norm_curve.inputs[1]) + tex_shader.links.new(norm_curve.outputs[0], norm_map.inputs[1]) + tex_shader.links.new(norm_map.outputs[0], principled_bsdf.inputs["Normal"]) + tex_shader.inputs[1].default_value = [0.5, 0.5, 1, 1] + + # specular + spec_y = 140 + spec_separate_rgb = tex_shader.nodes.new("ShaderNodeSeparateRGB") + spec_separate_rgb.location = [-200, spec_y] + tex_shader.links.new(g_in.outputs[2], spec_separate_rgb.inputs[0]) + tex_shader.links.new(spec_separate_rgb.outputs[0], principled_bsdf.inputs["Specular"]) + tex_shader.links.new(spec_separate_rgb.outputs[1], principled_bsdf.inputs["Metallic"]) + tex_shader.links.new(spec_separate_rgb.outputs[2], principled_bsdf.inputs["Roughness"]) + tex_shader.inputs[2].default_value = [0.5, 0, 0.5, 1] + + # emission + tex_shader.links.new(g_in.outputs[3], principled_bsdf.inputs["Emission"]) + tex_shader.inputs[3].default_value = [0, 0, 0, 1] + + # alpha + alpha_separate_rgb = tex_shader.nodes.new("ShaderNodeSeparateRGB") + alpha_separate_rgb.location = [-200, -180] + tex_shader.links.new(g_in.outputs[4], alpha_separate_rgb.inputs[0]) + tex_shader.links.new(alpha_separate_rgb.outputs[0], principled_bsdf.inputs["Alpha"]) + tex_shader.inputs[4].default_value = [1, 0, 0, 1] + + tex_shader.inputs[0].name = "Diffuse" + tex_shader.inputs[1].name = "Normal" + tex_shader.inputs[2].name = "Specular" + tex_shader.inputs[3].name = "Emission" + tex_shader.inputs[4].name = "Alpha" + # clear all objects except camera for obj in bpy.context.scene.objects: - if obj.type == "CAMERA": continue - obj.select_set(True) + if obj.type != "CAMERA": + obj.select_set(True) bpy.ops.object.delete() - -# clear materials -for block in [block for block in bpy.data.materials if block.users == 0]: - bpy.data.materials.remove(block) - -# clear meshes -for block in [block for block in bpy.data.meshes if block.users == 0]: - bpy.data.meshes.remove(block) +cleanup() # setup helper objects # 1. 
fallback cube -bpy.ops.mesh.primitive_cube_add(size=100) -fallback_cube = bpy.context.selected_objects[0] +bpy.ops.mesh.primitive_cube_add(size=2) +fallback_cube = bpy.context.active_object fallback_cube.name = "__fallback" fallback_cube.data.name = "__fallback" # 2. empty mesh bpy.ops.mesh.primitive_cube_add(size=1) -empty_mesh = bpy.context.selected_objects[0] +empty_mesh = bpy.context.active_object empty_mesh.name = "__empty" empty_mesh.data.name = "__empty" +empty_mesh.data.clear_geometry() # do it! +if not data_dir.endswith(os.sep): + data_dir += os.sep + with open(os.path.join(data_dir, "processed.json")) as file: - import_umap(json.loads(file.read())) + import_umap(json.loads(file.read())) # delete helper objects bpy.ops.object.select_all(action="DESELECT") fallback_cube.select_set(True) empty_mesh.select_set(True) bpy.ops.object.delete() +cleanup() print("All done in " + str(int((time.time() * 1000.0) - start)) + "ms")
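+
+# NOTE: each entry in processed.json is read positionally by import_umap() above:
+#   [guid, export_type, mesh_path, mats, texture_data, location, rotation, scale, child_comps]
+# Illustrative example only (hypothetical paths, not taken from a real export):
+#   ["0123456789abcdef...", "StaticMeshComponent", "/Game/Props/SM_Example",
+#    {"/Game/Materials/M_Example": [["/Game/Textures/T_Example_D", None, None, None]]},
+#    [None], [0, 0, 0], [0, 0, 0], [1, 1, 1], None]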