Add caching of the read field descriptions for efficiency
parent 03bc9cf00a
commit 1a9cb9e3c9
1 changed file with 31 additions and 18 deletions
@@ -39,6 +39,7 @@ import com.google.common.collect.ImmutableBiMap;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
+import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -148,8 +149,18 @@ public class FMLDeobfuscatingRemapper extends Remapper {
         rawFieldMaps.get(cl).put(oldName + ":null", newName);
     }

-    @SuppressWarnings("unchecked")
+    /*
+     * Cache the field descriptions for classes so we don't repeatedly reload the same data again and again
+     */
+    private Map<String,Map<String,String>> fieldDescriptions = Maps.newHashMap();
+
     private String getFieldType(String owner, String name)
+    {
+        if (fieldDescriptions.containsKey(owner))
+        {
+            return fieldDescriptions.get(owner).get(name);
+        }
+        synchronized (fieldDescriptions)
     {
         try
         {
@@ -161,18 +172,20 @@ public class FMLDeobfuscatingRemapper extends Remapper {
             ClassReader cr = new ClassReader(classBytes);
             ClassNode classNode = new ClassNode();
             cr.accept(classNode, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
-            for (FieldNode fieldNode : (List<FieldNode>) classNode.fields) {
-                if (fieldNode.name.equals(name)) {
-                    return fieldNode.desc;
-                }
+            Map<String,String> resMap = Maps.newHashMap();
+            for (FieldNode fieldNode : classNode.fields) {
+                resMap.put(fieldNode.name, fieldNode.desc);
             }
+            fieldDescriptions.put(owner, resMap);
+            return resMap.get(name);
         }
         catch (IOException e)
         {
-            e.printStackTrace();
+            FMLLog.log(Level.SEVERE,e, "A critical exception occured reading a class file %s", owner);
         }
         return null;
     }
+    }

     private void parseClass(Builder<String, String> builder, String[] parts)
     {
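
For reference, here is a minimal, self-contained sketch of the caching pattern the diff introduces: a per-class map from field name to descriptor, built once by parsing the class bytes with ASM and consulted on every later lookup. The class name FieldDescriptionCache and the getClassBytes helper are placeholders added for illustration (how the remapper actually obtains the class bytes is outside the hunks shown, and the real code logs failures through FMLLog rather than printing a stack trace); the rest follows the shape of the new getFieldType above.

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.FieldNode;

public class FieldDescriptionCache {
    // owner (internal class name) -> field name -> field descriptor
    private final Map<String, Map<String, String>> fieldDescriptions = new HashMap<>();

    public String getFieldType(String owner, String name) {
        // Fast path: the class has already been parsed once.
        if (fieldDescriptions.containsKey(owner)) {
            return fieldDescriptions.get(owner).get(name);
        }
        synchronized (fieldDescriptions) {
            try {
                byte[] classBytes = getClassBytes(owner); // placeholder lookup
                if (classBytes == null) {
                    return null;
                }
                ClassReader cr = new ClassReader(classBytes);
                ClassNode classNode = new ClassNode();
                // Skip code, debug info and frames: only the field table is needed.
                cr.accept(classNode, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
                Map<String, String> resMap = new HashMap<>();
                for (FieldNode fieldNode : classNode.fields) {
                    resMap.put(fieldNode.name, fieldNode.desc);
                }
                // Cache every field of the class at once so later lookups are map hits.
                fieldDescriptions.put(owner, resMap);
                return resMap.get(name);
            } catch (IOException e) {
                e.printStackTrace(); // the commit logs this through FMLLog instead
            }
            return null;
        }
    }

    private byte[] getClassBytes(String owner) throws IOException {
        // Placeholder: resolve the class file from the classpath for this sketch.
        try (InputStream in = getClass().getClassLoader()
                .getResourceAsStream(owner + ".class")) {
            return in == null ? null : in.readAllBytes();
        }
    }
}

As in the diff, only the parse-and-populate path is synchronized; the initial containsKey check reads the cache without locking.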