Cleanup Forge's build script, fix some unneeded patches.

Add checkExcs/checkAll tasks.
LexManos 2020-08-21 02:47:04 -07:00
parent c6d512afe8
commit 9118c3dd24
14 changed files with 695 additions and 644 deletions


@@ -15,45 +15,32 @@ buildscript {
classpath 'org.ow2.asm:asm-tree:7.2'
}
}
import groovy.json.JsonSlurper
import groovy.json.JsonBuilder
import java.nio.file.Files
import java.text.SimpleDateFormat
import java.util.Date
import java.util.LinkedHashMap
import java.util.TreeSet
import java.util.stream.Collectors
import java.util.zip.ZipEntry
import java.util.zip.ZipInputStream
import java.util.zip.ZipOutputStream
import java.security.MessageDigest
import java.net.URL
import net.minecraftforge.forge.tasks.*
import net.minecraftforge.gradle.common.task.ArchiveChecksum
import static net.minecraftforge.forge.tasks.Util.*
import net.minecraftforge.gradle.common.task.DownloadMavenArtifact
import net.minecraftforge.gradle.common.task.ExtractInheritance
import net.minecraftforge.gradle.common.task.SignJar
import net.minecraftforge.gradle.common.util.HashStore
import net.minecraftforge.gradle.mcp.function.MCPFunction
import net.minecraftforge.gradle.mcp.util.MCPEnvironment
import net.minecraftforge.gradle.patcher.task.ApplyBinPatches
import net.minecraftforge.gradle.patcher.task.TaskReobfuscateJar
import net.minecraftforge.gradle.userdev.tasks.RenameJar
import org.apache.tools.ant.filters.ReplaceTokens
import de.undercouch.gradle.tasks.download.Download
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.objectweb.asm.ClassReader
import org.objectweb.asm.Opcodes
plugins {
id 'net.minecrell.licenser' version '0.4'
id 'org.ajoberstar.grgit' version '3.1.1'
id 'de.undercouch.download' version '3.3.0'
id 'com.github.ben-manes.versions' version '0.22.0'
}
apply plugin: 'eclipse'
Util.init() //Init all our extension methods!
println('Java: ' + System.getProperty('java.version') + ' JVM: ' + System.getProperty('java.vm.version') + '(' + System.getProperty('java.vendor') + ') Arch: ' + System.getProperty('os.arch'))
ext {
@@ -138,7 +125,6 @@ project(':forge') {
apply plugin: 'eclipse'
apply plugin: 'net.minecraftforge.gradle.patcher'
apply plugin: 'net.minecrell.licenser'
apply plugin: 'de.undercouch.download'
compileJava.sourceCompatibility = compileJava.targetCompatibility = sourceCompatibility = targetCompatibility = '1.8' // Need this here so eclipse task generates correctly.
group = 'net.minecraftforge'
@@ -484,419 +470,59 @@ project(':forge') {
if (changelog.exists())
extraTxts += changelog
task downloadCrowdin() {
ext {
output = file('build/crowdin_raw.zip')
update = file('build/crowdin.json')
id = 'minecraft-forge'
}
onlyIf {
project.hasProperty('crowdinKey') && !project.gradle.startParameter.isOffline()
}
doLast {
download {
src "https://api.crowdin.com/api/project/${id}/export?key=${project.crowdinKey}&json"
dest update
overwrite true
}
if (!update.text.contains('success')) {
throw new RuntimeException("Crowdin export failed, see ${update} for more info")
}
download {
src "https://api.crowdin.com/api/project/${id}/download/all.zip?key=${project.crowdinKey}"
dest output
overwrite true
}
}
}
task crowdin(type: Zip) {
dependsOn downloadCrowdin
onlyIf {
!downloadCrowdin.state.skipped
}
baseName = project.name
version = project.version
classifier = 'crowdin'
destinationDir = file('build/distributions')
from(zipTree(downloadCrowdin.output)){
eachFile { //Tired of waiting for crowdin API, rename things myself. Remove once crowdin stops being dumb
if (it.name.equals('en_us.json') && !it.path.startsWith('assets/')) {
it.name = it.path.split('/')[0] + '.json'
it.path = 'assets/forge/lang/' + it.name
}
}
exclude { it.isDirectory() }
rename { it.toLowerCase() }//Minecraft needs it lowercase.
exclude '**/*.lang' //Pre-1.13 format
}
}
task crowdin(type: CrowdinTask) {
key = project.hasProperty('crowdinKey') ? project.crowdinKey : null
//TODO: Merge our english.json?
}
// We apply the bin patches we just created to make a jar that is JUST our changes
genClientBinPatches.tool = BINPATCH_TOOL
task applyClientBinPatches(type: ApplyBinPatches, dependsOn: genClientBinPatches) {
clean = { genClientBinPatches.cleanJar }
input = genClientBinPatches.output
tool = BINPATCH_TOOL
}
genServerBinPatches.tool = BINPATCH_TOOL
task applyServerBinPatches(type: ApplyBinPatches, dependsOn: genServerBinPatches) {
clean = { genServerBinPatches.cleanJar }
input = genServerBinPatches.output
tool = BINPATCH_TOOL
}
genJoinedBinPatches.tool = BINPATCH_TOOL
task applyJoinedBinPatches(type: ApplyBinPatches, dependsOn: genJoinedBinPatches) {
clean = { genJoinedBinPatches.cleanJar }
input = genJoinedBinPatches.output
tool = BINPATCH_TOOL
}
// Create SRG named Vanilla jars, using the SpecialSource we have in the installer
task createClientSRG(type: RenameJar, dependsOn: genClientBinPatches) {
tool = SPECIAL_SOURCE + ':shaded'
args = ['--stable', '--in-jar', '{input}', '--out-jar', '{output}', '--srg-in', '{mappings}']
mappings = { genClientBinPatches.srg }
input = { genClientBinPatches.cleanJar }
output = file('build/createClientSRG/output.jar')
}
task createServerSRG(type: RenameJar, dependsOn: genServerBinPatches) {
tool = SPECIAL_SOURCE + ':shaded'
args = ['--stable', '--in-jar', '{input}', '--out-jar', '{output}', '--srg-in', '{mappings}']
mappings = { genServerBinPatches.srg }
input = { genServerBinPatches.cleanJar }
output = file('build/createServerSRG/output.jar')
}
// Checksum tasks so that we can know if the vanilla classes are what we expect them to be!
task clientBaseChecksum(type: ArchiveChecksum, dependsOn: genClientBinPatches) {
input = { genClientBinPatches.cleanJar} //Lazy because Patcher Plugin sets the value afterEvaluate
name = 'client'
}
task serverBaseChecksum(type: ArchiveChecksum, dependsOn: genServerBinPatches) {
input = { genServerBinPatches.cleanJar }
name = 'server'
}
task joinedBaseChecksum(type: ArchiveChecksum, dependsOn: genJoinedBinPatches) {
input = { genJoinedBinPatches.cleanJar }
name = 'joined'
}
task clientPatchedChecksum(type: ArchiveChecksum, dependsOn: applyClientBinPatches) {
input = applyClientBinPatches.output
name = 'client_patched'
}
task serverPatchedChecksum(type: ArchiveChecksum, dependsOn: applyServerBinPatches) {
input = applyServerBinPatches.output
name = 'server_patched'
}
task joinedPatchedChecksum(type: ArchiveChecksum, dependsOn: applyJoinedBinPatches) {
input = applyJoinedBinPatches.output
name = 'joined_patched'
}
// Utility methods for testing checksums
task cleanChecksums() {
['client', 'server', 'joined'].each { side ->
['Base', 'Patched'].each { type ->
def clean = "clean${side.capitalize()}${type}Checksum"
dependsOn(clean)
tasks.findByName("${side}${type}Checksum").mustRunAfter(clean)
}
}
}
task checksums() {
['client', 'server', 'joined'].each { side ->
['Base', 'Patched'].each { type ->
dependsOn("${side}${type}Checksum")
}
}
}
['Client', 'Server', 'Joined'].each { side ->
def gen = tasks.getByName("gen${side}BinPatches")
gen.tool = BINPATCH_TOOL
task "apply${side}BinPatches"(type: ApplyBinPatches, dependsOn: gen) {
clean = { gen.cleanJar }
input = gen.output
tool = BINPATCH_TOOL
}
if (side != 'Joined') {
// Create SRG named Vanilla jars, using the SpecialSource we have in the installer
task "create${side}SRG"(type: RenameJar, dependsOn: gen) {
tool = SPECIAL_SOURCE + ':shaded'
args = ['--stable', '--in-jar', '{input}', '--out-jar', '{output}', '--srg-in', '{mappings}']
mappings = { gen.srg }
input = { gen.cleanJar }
output = file("build/create${name}SRG/output.jar")
}
}
}
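For orientation, a minimal sketch (not part of the commit) of the task names the loop above generates; it simply reproduces the loop's naming scheme, showing that it covers the explicit applyXxxBinPatches/createXxxSRG declarations removed above:
// Purely illustrative; 'Joined' only gets the bin-patch task, the SRG rename is skipped for it.
def generated = []
['Client', 'Server', 'Joined'].each { side ->
    generated << 'apply' + side + 'BinPatches'
    if (side != 'Joined')
        generated << 'create' + side + 'SRG'
}
assert generated == ['applyClientBinPatches', 'createClientSRG',
                     'applyServerBinPatches', 'createServerSRG',
                     'applyJoinedBinPatches']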
task downloadLibraries(dependsOn: ':mcp:setupMCP') {
inputs.file VERSION_JSON
doLast {
def json = new JsonSlurper().parseText(VERSION_JSON.text)
json.libraries.each {lib ->
def artifacts = [lib.downloads.artifact] + lib.downloads.get('classifiers', [:]).values()
artifacts.each{ art ->
def target = file('build/libraries/' + art.path)
if (!target.exists()) {
download {
src art.url
dest target
}
}
}
}
}
task downloadLibraries(type: DownloadLibrariesTask, dependsOn: ':mcp:setupMCP') {
input = VERSION_JSON
output = rootProject.file('build/libraries/')
}
task extractInheritance(type: ExtractInheritance, dependsOn: [genJoinedBinPatches, downloadLibraries]) {
tool = INSTALLER_TOOLS + ':fatjar'
input { genJoinedBinPatches.cleanJar }
doFirst {
def json = new JsonSlurper().parseText(VERSION_JSON.text)
json.libraries.each {lib ->
def artifacts = [lib.downloads.artifact] + lib.downloads.get('classifiers', [:]).values()
artifacts.each{ art ->
def target = file('build/libraries/' + art.path)
if (target.exists())
addLibrary(target)
}
}
}
doFirst { downloadLibraries.libraries.each{ addLibrary(it) } }
}
task checkATs(dependsOn: extractInheritance) {
inputs.file { extractInheritance.output }
inputs.files patcher.accessTransformers
doLast {
def parse = { line ->
def idx = line.indexOf('#')
def comment = idx == -1 ? null : line.substring(idx)
if (idx != -1) line = line.substring(0, idx - 1)
def (modifier, cls, desc) = (line.trim() + ' ').split(' ', -1)
def key = cls + (desc.isEmpty() ? '' : ' ' + desc)
return [modifier, cls, desc, comment, key]
}
def accessLevel = { access ->
if ((access & Opcodes.ACC_PUBLIC) != 0) return 3
if ((access & Opcodes.ACC_PROTECTED) != 0) return 2
if ((access & Opcodes.ACC_PRIVATE) != 0) return 0
return 1
}
def accessStr = { access ->
if (access.endsWith('-f') || access.endsWith('+f'))
return 4
switch (access.toLowerCase()) {
case 'public': return 3
case 'protected': return 2
case 'default': return 1
case 'private': return 0
default: return -1
}
}
def json = new JsonSlurper().parseText(extractInheritance.output.text)
patcher.accessTransformers.each { f ->
TreeMap lines = [:]
def group = null
for (def line : f.readLines()) {
if (line.isEmpty()) continue
if (line.startsWith('#group ')) {
def (modifier, cls, desc, comment, key) = parse.call(line.substring(7))
if (!desc.equals('*') && !desc.equals('*()') && !desc.equals('<init>'))
throw new IllegalStateException('Invalid group: ' + line)
group = [modifier: modifier, cls: cls, desc: desc, comment: comment,
'existing': [] as Set,
'children': [] as TreeSet,
group: true
]
if (lines.containsKey(key))
throw new IllegalStateException('Duplicate group: ' + line)
lines[key] = group
} else if (group != null) {
if (line.startsWith('#endgroup')) {
group = null
} else {
def (modifier, cls, desc, comment, key) = parse.call(line)
group['existing'].add(key)
}
} else if (line.startsWith('#endgroup')) {
throw new IllegalStateException('Invalid group ending: ' + line)
} else if (line.startsWith('#')) {
//Nom
} else {
def (modifier, cls, desc, comment, key) = parse.call(line)
if (lines.containsKey(key)) {
println('Duplicate: ' + line)
continue
}
lines[key] = [modifier: modifier, cls: cls, desc: desc, comment: comment, group: false]
}
}
// Process groups; this will remove any entries outside the group that are covered by the group
for (def key : new ArrayList<>(lines.keySet())) {
def entry = lines.get(key)
if (entry != null && entry['group']) {
def cls = entry['cls']
def jcls = json.get(cls.replaceAll('\\.', '/'))
if (jcls == null) {
lines.remove(key)
println('Invalid Group: ' + key)
} else if ('*'.equals(entry['desc'])) {
if (!jcls.containsKey('fields')) {
lines.remove(key)
println('Invalid Group, Class has no fields: ' + key)
} else {
jcls['fields'].each { field, value ->
def fkey = cls + ' ' + field
if (accessLevel.call(value['access']) < accessStr.call(entry['modifier'])) {
if (lines.containsKey(fkey)) {
lines.remove(fkey)
} else if (!entry['existing'].contains(fkey)) {
println('Added: ' + fkey)
}
entry['children'].add(fkey)
} else if (lines.containsKey(fkey)) {
lines.remove(fkey)
println('Removed: ' + fkey)
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
} else if ('*()'.equals(entry['desc'])) {
if (!jcls.containsKey('methods')) {
lines.remove(key)
println('Invalid Group, Class has no methods: ' + key)
} else {
jcls['methods'].each{ mtd, value ->
if (mtd.startsWith('<clinit>'))
return
key = cls + ' ' + mtd.replace(' ', '')
if (accessLevel.call(value['access']) < accessStr.call(entry['modifier'])) {
if (lines.containsKey(key)) {
lines.remove(key)
} else if (!entry['existing'].contains(key)) {
println('Added: ' + key)
}
entry['children'].add(key)
} else if (lines.containsKey(key)) {
lines.remove(key)
println('Removed: ' + key)
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
} else if ('<init>'.equals(entry['desc'])) { //Make all public non-abstract subclasses
json.each{ tcls,value ->
if (!value.containsKey('methods') || ((value['access'] & Opcodes.ACC_ABSTRACT) != 0))
return
def parents = [] as Set
def parent = tcls
while (parent != null && json.containsKey(parent)) {
parents.add(parent)
def p = json[parent]
parent = p == null ? null : p['superName']
}
if (parents.contains(cls.replaceAll('\\.', '/'))) {
value['methods'].each{ mtd, v ->
if (mtd.startsWith('<init>')) {
def child = tcls.replaceAll('/', '\\.') + ' ' + mtd.replace(' ', '')
if (accessLevel.call(v['access']) < 3) {
if (lines.containsKey(child)) {
lines.remove(child)
} else if (!entry['existing'].contains(child)) {
println('Added: ' + child)
}
entry['children'].add(child)
} else if (lines.containsKey(child)) {
lines.remove(child)
println('Removed: ' + child)
}
}
}
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
}
}
// Process normal lines, remove invalid and remove narrowing
for (def key : new ArrayList<>(lines.keySet())) {
def entry = lines.get(key)
if (entry != null && !entry['group']) {
def cls = entry['cls']
def jcls = json.get(cls.replaceAll('\\.', '/'))
if (jcls == null) {
lines.remove(key)
println('Invalid: ' + key)
} else if (entry['desc'] == '') {
if (accessLevel.call(jcls['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
}
} else if (!entry['desc'].contains('(')) {
if (!jcls.containsKey('fields') || !jcls['fields'].containsKey(entry['desc'])) {
lines.remove(key)
println('Invalid: ' + key)
} else {
def value = jcls['fields'][entry['desc']]
if (accessLevel.call(value['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
println(entry.comment)
}
}
} else {
def jdesc = entry['desc'].replace('(', ' (')
if (!jcls.containsKey('methods') || !jcls['methods'].containsKey(jdesc)) {
lines.remove(key)
println('Invalid: ' + key)
} else {
def value = jcls['methods'][jdesc]
if (accessLevel.call(value['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
}
}
}
}
}
def data = []
lines.each { key,value ->
if (!value.group) {
data.add(value.modifier + ' ' + key + (value.comment == null ? '' : ' ' + value.comment))
} else {
data.add('#group ' + value.modifier + ' ' + key + (value.comment == null ? '' : ' ' + value.comment))
value.children.each{ data.add(value.modifier + ' ' + it) }
data.add('#endgroup')
}
}
f.text = data.join('\n')
}
}
task checkATs(type: CheckATs, dependsOn: extractInheritance) {
inheritance = extractInheritance.output
ats = patcher.accessTransformers
}
task checkSAS(dependsOn: extractInheritance) {
inputs.file { extractInheritance.output }
inputs.files patcher.sideAnnotationStrippers
doLast {
def json = new JsonSlurper().parseText(extractInheritance.output.text)
patcher.sideAnnotationStrippers.each { f ->
def lines = []
f.eachLine { line ->
if (line[0] == '\t') return //Skip any tabbed lines, those are the ones we add
def idx = line.indexOf('#')
if (idx == 0 || line.isEmpty()) {
lines.add(line)
return
}
def comment = idx == -1 ? null : line.substring(idx)
if (idx != -1) line = line.substring(0, idx - 1)
def (cls, desc) = (line.trim() + ' ').split(' ', -1)
cls = cls.replaceAll('\\.', '/')
desc = desc.replace('(', ' (')
if (desc.isEmpty() || json[cls] == null || json[cls]['methods'] == null || json[cls]['methods'][desc] == null) {
println('Invalid: ' + line)
return
}
def mtd = json[cls]['methods'][desc]
lines.add(cls + ' ' + desc.replace(' ', '') + (comment == null ? '' : ' ' + comment))
def children = json.values().findAll{ it.methods != null && it.methods[desc] != null && it.methods[desc].override == cls}.collect { it.name + ' ' + desc.replace(' ', '') } as TreeSet
children.each { lines.add('\t' + it) }
}
f.text = lines.join('\n')
}
}
task checkSAS(type: CheckSAS, dependsOn: extractInheritance) {
inheritance = extractInheritance.output
sass = patcher.sideAnnotationStrippers
}
task checkExcs(type: CheckExcs, dependsOn: jar) {
binary = jar.archivePath
excs = patcher.excs
}
task checkAll(dependsOn: [checkATs, checkSAS, checkExcs]){}
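A usage note rather than part of the commit: each of these check tasks rewrites its data files in place (f.text = ...), so checkAll effectively re-validates and normalises the AT, SAS and EXC files in one go. If it were ever wanted on the normal verification lifecycle, a hypothetical wiring (assuming a 'check' lifecycle task exists in this project) could look like:
// Hypothetical, not part of this commit: run the data-file checks whenever 'check' runs.
tasks.matching { it.name == 'check' }.all { checkTask ->
    checkTask.dependsOn checkAll
}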
task checkPatchesAndFix(type: CheckPatches) {
patchDir = file("$rootDir/patches")
@@ -915,7 +541,7 @@ project(':forge') {
ext {
output = file('build/version.json')
vanilla = project(':mcp').file('build/mcp/downloadJson/version.json')
timestamp = dateToIso8601(new Date())
timestamp = iso8601Now()
comment = [
"Please do not automate the download and installation of Forge.",
"Our efforts are supported by ads from the download page.",
@@ -927,7 +553,7 @@ project(':forge') {
inputs.file vanilla
outputs.file output
doLast {
def json_vanilla = new JsonSlurper().parseText(vanilla.text)
def json_vanilla = vanilla.json()
def json = [
_comment_: comment,
id: id,
@@ -948,7 +574,7 @@ project(':forge') {
artifact: [
path: "${project.group.replace('.', '/')}/${project.name}/${project.version}/${project.name}-${project.version}.jar",
url: "", //Do not include the URL so that the installer/launcher won't grab it. This is also why we don't have the universal classifier
sha1: sha1(launcherJar.archivePath),
sha1: launcherJar.archivePath.sha1(),
size: launcherJar.archivePath.length()
]
]
@@ -990,7 +616,7 @@ project(':forge') {
artifact: [
path: "${project.group.replace('.', '/')}/${project.name}/${project.version}/${project.name}-${project.version}-universal.jar",
url: "", //Do not include the URL so that the installer/launcher won't grab it. This is also why we don't have the universal classifier
sha1: sha1(universalJar.archivePath),
sha1: universalJar.archivePath.sha1(),
size: universalJar.archivePath.length()
]
]
@@ -1021,32 +647,32 @@ project(':forge') {
server: "[net.minecraft:server:${MC_VERSION}-${MCP_VERSION}:slim]"
],
MC_SLIM_SHA: [
client: "'${sha1(tasks.getByName('downloadClientSlim').output)}'",
server: "'${sha1(tasks.getByName('downloadServerSlim').output)}'"
client: "'${tasks.getByName('downloadClientSlim').output.sha1()}'",
server: "'${tasks.getByName('downloadServerSlim').output.sha1()}'"
],
MC_EXTRA: [
client: "[net.minecraft:client:${MC_VERSION}-${MCP_VERSION}:extra]",
server: "[net.minecraft:server:${MC_VERSION}-${MCP_VERSION}:extra]"
],
MC_EXTRA_SHA: [
client: "'${sha1(tasks.getByName('downloadClientExtra').output)}'",
server: "'${sha1(tasks.getByName('downloadServerExtra').output)}'"
client: "'${tasks.getByName('downloadClientExtra').output.sha1()}'",
server: "'${tasks.getByName('downloadServerExtra').output.sha1()}'"
],
MC_SRG: [
client: "[net.minecraft:client:${MC_VERSION}-${MCP_VERSION}:srg]",
server: "[net.minecraft:server:${MC_VERSION}-${MCP_VERSION}:srg]"
],
/*MC_SRG_SHA: [
client: "'${sha1(createClientSRG.output)}'",
server: "'${sha1(createServerSRG.output)}'"
client: "'${createClientSRG.output.sha1()}'",
server: "'${createServerSRG.output.sha1()}'"
],*/
PATCHED: [
client: "[${project.group}:${project.name}:${project.version}:client]",
server: "[${project.group}:${project.name}:${project.version}:server]"
],
PATCHED_SHA: [
client: "'${sha1(applyClientBinPatches.output)}'",
server: "'${sha1(applyServerBinPatches.output)}'"
client: "'${applyClientBinPatches.output.sha1()}'",
server: "'${applyServerBinPatches.output.sha1()}'"
],
MCP_VERSION: [
client: "'${MCP_VERSION}'",
@@ -1124,28 +750,10 @@ project(':forge') {
universalJar {
from extraTxts
/* TODO: Annotation Cache? need to talk to cpw about his new design.
from(fixAnnotationsJson){
into 'META-INF'
}
dependsOn fixAnnotationsJson
*/
/* We do not actually check this anywhere, so it's 400KB of useless data
// Add checksum files of clean and patched vanilla classes.
['client', 'server'].each { side ->
['Base', 'Patched'].each { type ->
from(tasks.getByName("${side}${type}Checksum").output) {
into 'checksums/'
}
}
}
dependsOn checksums
*/
// add crowdin locales
from { !crowdin.state.skipped ? zipTree(crowdin.archivePath) : null}
from { crowdin.output.exists() ? zipTree(crowdin.output) : null}
dependsOn crowdin
duplicatesStrategy = 'exclude'
doFirst {
MANIFESTS.each{ pkg, values ->
@@ -1446,99 +1054,6 @@ project(':forge') {
}
}
def dateToIso8601(date) {
def format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")
def result = format.format(date)
return result[0..21] + ':' + result[22..-1]
}
def sha1(file) {
MessageDigest md = MessageDigest.getInstance('SHA-1')
file.eachByte 4096, {bytes, size ->
md.update(bytes, 0, size)
}
return md.digest().collect {String.format "%02x", it}.join()
}
def artifactTree(project, artifact) {
if (!project.ext.has('tree_resolver'))
project.ext.tree_resolver = 1
def cfg = project.configurations.create('tree_resolver_' + project.ext.tree_resolver++)
def dep = project.dependencies.create(artifact)
cfg.dependencies.add(dep)
def files = cfg.resolve()
return getArtifacts(project, cfg, true)
}
def getArtifacts(project, config, classifiers) {
def ret = [:]
config.resolvedConfiguration.resolvedArtifacts.each {
def art = [
group: it.moduleVersion.id.group,
name: it.moduleVersion.id.name,
version: it.moduleVersion.id.version,
classifier: it.classifier,
extension: it.extension,
file: it.file
]
def key = art.group + ':' + art.name
def folder = "${art.group.replace('.', '/')}/${art.name}/${art.version}/"
def filename = "${art.name}-${art.version}"
if (art.classifier != null)
filename += "-${art.classifier}"
filename += ".${art.extension}"
def path = "${folder}${filename}"
def url = "https://libraries.minecraft.net/${path}"
if (!checkExists(url)) {
url = "https://files.minecraftforge.net/maven/${path}"
}
//TODO remove when Mojang launcher is updated
if (!classifiers && art.classifier != null) { //Mojang launcher doesn't currently support classifiers, so... move it to part of the version, and force the extension to 'jar'
art.version = "${art.version}-${art.classifier}"
art.classifier = null
art.extension = 'jar'
path = "${art.group.replace('.', '/')}/${art.name}/${art.version}/${art.name}-${art.version}.jar"
}
ret[key] = [
name: "${art.group}:${art.name}:${art.version}" + (art.classifier == null ? '' : ":${art.classifier}") + (art.extension == 'jar' ? '' : "@${art.extension}"),
downloads: [
artifact: [
path: path,
url: url,
sha1: sha1(art.file),
size: art.file.length()
]
]
]
}
return ret
}
def checkExists(url) {
try {
def code = new URL(url).openConnection().with {
requestMethod = 'HEAD'
connect()
responseCode
}
return code == 200
} catch (Exception e) {
if (e.toString().contains('unable to find valid certification path to requested target'))
throw new RuntimeException('Failed to connect to ' + url + ': Missing certificate root authority, try updating java')
throw e
}
}
def getClasspath(project, libs, artifact) {
def ret = []
artifactTree(project, artifact).each { key, lib ->
libs[lib.name] = lib
if (lib.name != artifact)
ret.add(lib.name)
}
return ret
}
//evaluationDependsOnChildren()
task setup() {
dependsOn ':clean:extractMapped'

buildSrc/build.gradle (new file, 10 lines)

@@ -0,0 +1,10 @@
repositories {
//mavenCentral() //TODO: Update Gradle to use HTTPS by default
maven {
name 'maven_central'
url 'https://repo.maven.apache.org/maven2/'
}
}
dependencies {
implementation 'org.ow2.asm:asm:7.2'
}
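For context, standard Gradle behaviour that this diff relies on but does not show: everything compiled under buildSrc is placed on the build script classpath, which is why the main build.gradle can simply import net.minecraftforge.forge.tasks.* and use CheckATs, CheckSAS, CheckExcs, CrowdinTask and DownloadLibrariesTask as task types. The source layout below is the usual convention and an assumption, not something visible in this diff:
// Assumed layout (buildSrc convention; exact paths are not shown in this diff):
//   buildSrc/build.gradle                                     <- the file above; adds org.ow2.asm:asm:7.2 for the task classes
//   buildSrc/src/main/groovy/net/minecraftforge/forge/tasks/  <- CheckATs.groovy, CheckSAS.groovy, CheckExcs.groovy, ...
// With that in place the root build script only needs:
import net.minecraftforge.forge.tasks.*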


@@ -0,0 +1,231 @@
package net.minecraftforge.forge.tasks
import java.util.ArrayList
import java.util.TreeMap
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.TaskAction
import org.objectweb.asm.Opcodes
public class CheckATs extends DefaultTask {
@InputFile File inheritance
@InputFiles File[] ats
@TaskAction
protected void exec() {
Util.init()
def parse = { line ->
def idx = line.indexOf('#')
def comment = idx == -1 ? null : line.substring(idx)
if (idx != -1) line = line.substring(0, idx - 1)
def (modifier, cls, desc) = (line.trim() + ' ').split(' ', -1)
def key = cls + (desc.isEmpty() ? '' : ' ' + desc)
return [modifier, cls, desc, comment, key]
}
def accessLevel = { access ->
if ((access & Opcodes.ACC_PUBLIC) != 0) return 3
if ((access & Opcodes.ACC_PROTECTED) != 0) return 2
if ((access & Opcodes.ACC_PRIVATE) != 0) return 0
return 1
}
def accessStr = { access ->
if (access.endsWith('-f') || access.endsWith('+f'))
return 4
switch (access.toLowerCase()) {
case 'public': return 3
case 'protected': return 2
case 'default': return 1
case 'private': return 0
default: return -1
}
}
def json = inheritance.json()
ats.each { f ->
TreeMap lines = [:]
def group = null
for (def line : f.readLines()) {
if (line.isEmpty()) continue
if (line.startsWith('#group ')) {
def (modifier, cls, desc, comment, key) = parse.call(line.substring(7))
if (!desc.equals('*') && !desc.equals('*()') && !desc.equals('<init>'))
throw new IllegalStateException('Invalid group: ' + line)
group = [modifier: modifier, cls: cls, desc: desc, comment: comment,
'existing': [] as Set,
'children': [] as TreeSet,
group: true
]
if (lines.containsKey(key))
throw new IllegalStateException('Duplicate group: ' + line)
lines[key] = group
} else if (group != null) {
if (line.startsWith('#endgroup')) {
group = null
} else {
def (modifier, cls, desc, comment, key) = parse.call(line)
group['existing'].add(key)
}
} else if (line.startsWith('#endgroup')) {
throw new IllegalStateException('Invalid group ending: ' + line)
} else if (line.startsWith('#')) {
//Nom
} else {
def (modifier, cls, desc, comment, key) = parse.call(line)
if (lines.containsKey(key)) {
println('Duplicate: ' + line)
continue
}
lines[key] = [modifier: modifier, cls: cls, desc: desc, comment: comment, group: false]
}
}
// Process groups; this will remove any entries outside the group that are covered by the group
for (def key : new ArrayList<>(lines.keySet())) {
def entry = lines.get(key)
if (entry != null && entry['group']) {
def cls = entry['cls']
def jcls = json.get(cls.replaceAll('\\.', '/'))
if (jcls == null) {
lines.remove(key)
println('Invalid Group: ' + key)
} else if ('*'.equals(entry['desc'])) {
if (!jcls.containsKey('fields')) {
lines.remove(key)
println('Invalid Group, Class has no fields: ' + key)
} else {
jcls['fields'].each { field, value ->
def fkey = cls + ' ' + field
if (accessLevel.call(value['access']) < accessStr.call(entry['modifier'])) {
if (lines.containsKey(fkey)) {
lines.remove(fkey)
} else if (!entry['existing'].contains(fkey)) {
println('Added: ' + fkey)
}
entry['children'].add(fkey)
} else if (lines.containsKey(fkey)) {
lines.remove(fkey)
println('Removed: ' + fkey)
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
} else if ('*()'.equals(entry['desc'])) {
if (!jcls.containsKey('methods')) {
lines.remove(key)
println('Invalid Group, Class has no methods: ' + key)
} else {
jcls['methods'].each{ mtd, value ->
if (mtd.startsWith('<clinit>'))
return
key = cls + ' ' + mtd.replace(' ', '')
if (accessLevel.call(value['access']) < accessStr.call(entry['modifier'])) {
if (lines.containsKey(key)) {
lines.remove(key)
} else if (!entry['existing'].contains(key)) {
println('Added: ' + key)
}
entry['children'].add(key)
} else if (lines.containsKey(key)) {
lines.remove(key)
println('Removed: ' + key)
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
} else if ('<init>'.equals(entry['desc'])) { //Make all public non-abstract subclasses
json.each{ tcls,value ->
if (!value.containsKey('methods') || ((value['access'] & Opcodes.ACC_ABSTRACT) != 0))
return
def parents = [] as Set
def parent = tcls
while (parent != null && json.containsKey(parent)) {
parents.add(parent)
def p = json[parent]
parent = p == null ? null : p['superName']
}
if (parents.contains(cls.replaceAll('\\.', '/'))) {
value['methods'].each{ mtd, v ->
if (mtd.startsWith('<init>')) {
def child = tcls.replaceAll('/', '\\.') + ' ' + mtd.replace(' ', '')
if (accessLevel.call(v['access']) < 3) {
if (lines.containsKey(child)) {
lines.remove(child)
} else if (!entry['existing'].contains(child)) {
println('Added: ' + child)
}
entry['children'].add(child)
} else if (lines.containsKey(child)) {
lines.remove(child)
println('Removed: ' + child)
}
}
}
}
}
entry['existing'].stream().findAll{ !entry['children'].contains(it) }.each{ println('Removed: ' + it) }
}
}
}
// Process normal lines, remove invalid and remove narrowing
for (def key : new ArrayList<>(lines.keySet())) {
def entry = lines.get(key)
if (entry != null && !entry['group']) {
def cls = entry['cls']
def jcls = json.get(cls.replaceAll('\\.', '/'))
if (jcls == null) {
lines.remove(key)
println('Invalid: ' + key)
} else if (entry['desc'] == '') {
if (accessLevel.call(jcls['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
}
} else if (!entry['desc'].contains('(')) {
if (!jcls.containsKey('fields') || !jcls['fields'].containsKey(entry['desc'])) {
lines.remove(key)
println('Invalid: ' + key)
} else {
def value = jcls['fields'][entry['desc']]
if (accessLevel.call(value['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
println(entry.comment)
}
}
} else {
def jdesc = entry['desc'].replace('(', ' (')
if (!jcls.containsKey('methods') || !jcls['methods'].containsKey(jdesc)) {
lines.remove(key)
println('Invalid: ' + key)
} else {
def value = jcls['methods'][jdesc]
if (accessLevel.call(value['access']) >= accessStr.call(entry['modifier']) && (entry.comment == null || !entry.comment.startsWith('#force '))) {
lines.remove(key)
println('Invalid Narrowing: ' + key)
}
}
}
}
}
def data = []
lines.each { key,value ->
if (!value.group) {
data.add(value.modifier + ' ' + key + (value.comment == null ? '' : ' ' + value.comment))
} else {
data.add('#group ' + value.modifier + ' ' + key + (value.comment == null ? '' : ' ' + value.comment))
value.children.each{ data.add(value.modifier + ' ' + it) }
data.add('#endgroup')
}
}
f.text = data.join('\n')
}
}
}
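For reference, a sketch of the access transformer syntax this task understands, reconstructed from parse() and the group handling above; the class and member names are invented placeholders:
// Plain entry:  <modifier> <class> [<field or method+descriptor>] [# comment]
// Group block:  #group <modifier> <class> <desc> ... #endgroup, where desc is one of:
//   '*'      -> every field of the class below the requested access level
//   '*()'    -> every method of the class below the requested access level (except <clinit>)
//   '<init>' -> constructors of all non-abstract subclasses that are not already public
// A modifier ending in -f or +f is treated as the widest level, so it never counts as narrowing.
def sampleAt = '''\
public net.example.SomeClass field_00000_a # invented example
#group public net.example.SomeBase *()
#endgroup
'''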


@@ -0,0 +1,95 @@
package net.minecraftforge.forge.tasks
import java.util.ArrayList
import java.util.TreeMap
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.TaskAction
import org.objectweb.asm.ClassReader
import org.objectweb.asm.ClassVisitor
import org.objectweb.asm.MethodVisitor
import org.objectweb.asm.Opcodes
import org.objectweb.asm.Type
import java.util.zip.ZipEntry
import java.util.zip.ZipInputStream
public class CheckExcs extends DefaultTask {
@InputFile File binary
@InputFiles File[] excs
@TaskAction
protected void exec() {
Util.init()
def known = []
binary.withInputStream { i ->
new ZipInputStream(i).withCloseable { zin ->
def visitor = new ClassVisitor(Opcodes.ASM7) {
private String cls
@Override
public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
this.cls = name
}
@Override
public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
known.add(this.cls + '.' + name + descriptor)
super.visitMethod(access, name, descriptor, signature, exceptions)
}
}
ZipEntry zein
while ((zein = zin.nextEntry) != null) {
if (zein.name.endsWith('.class')) {
ClassReader reader = new ClassReader(zin)
reader.accept(visitor, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES)
}
}
}
}
excs.each { f ->
def lines = []
f.eachLine { line ->
def idx = line.indexOf('#')
if (idx == 0 || line.isEmpty()) {
return
}
def comment = idx == -1 ? null : line.substring(idx)
if (idx != -1) line = line.substring(0, idx - 1)
if (!line.contains('=')) {
println('Invalid: ' + line)
return
}
def (key, value) = line.split('=', 2)
if (!known.contains(key)) {
println(key)
println('Invalid: ' + line)
return
}
def (cls, desc) = key.split('\\.', 2)
if (!desc.contains('(')) {
println('Invalid: ' + line)
return
}
def name = desc.split('\\(', 2)[0]
desc = '(' + desc.split('\\(', 2)[1]
def (exceptions, args) = value.contains('|') ? value.split('\\|', 2) : [value, ''] // '|' must be escaped, String.split takes a regex
if (args.split(',').length != Type.getArgumentTypes(desc).length) {
println('Invalid: ' + line)
return
}
lines.add(line)
}
f.text = lines.sort().join('\n')
}
}
}
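For reference, the .exc line shape implied by the checks above: an internal class name, a dot, the method name and descriptor, then '=' followed by the exception list and, after '|', the parameter names. The key must match a method found in the binary jar, and the number of names after '|' must match the argument count of the descriptor. The names below are placeholders:
// <class>.<method><descriptor>=<exceptions>[|<parameter names>]
def sampleExc = 'net/example/SomeClass.someMethod(Ljava/lang/String;)V=java/io/IOException|nameIn'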


@@ -0,0 +1,51 @@
package net.minecraftforge.forge.tasks
import java.util.ArrayList
import java.util.TreeMap
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.TaskAction
import org.objectweb.asm.Opcodes
public class CheckSAS extends DefaultTask {
@InputFile File inheritance
@InputFiles File[] sass
@TaskAction
protected void exec() {
Util.init()
def json = inheritance.json()
sass.each { f ->
def lines = []
f.eachLine { line ->
if (line[0] == '\t') return //Skip any tabbed lines, those are the ones we add
def idx = line.indexOf('#')
if (idx == 0 || line.isEmpty()) {
lines.add(line)
return
}
def comment = idx == -1 ? null : line.substring(idx)
if (idx != -1) line = line.substring(0, idx - 1)
def (cls, desc) = (line.trim() + ' ').split(' ', -1)
cls = cls.replaceAll('\\.', '/')
desc = desc.replace('(', ' (')
if (desc.isEmpty() || json[cls] == null || json[cls]['methods'] == null || json[cls]['methods'][desc] == null) {
println('Invalid: ' + line)
return
}
def mtd = json[cls]['methods'][desc]
lines.add(cls + ' ' + desc.replace(' ', '') + (comment == null ? '' : ' ' + comment))
def children = json.values().findAll{ it.methods != null && it.methods[desc] != null && it.methods[desc].override == cls}.collect { it.name + ' ' + desc.replace(' ', '') } as TreeSet
children.each { lines.add('\t' + it) }
}
f.text = lines.join('\n')
}
}
}


@@ -0,0 +1,65 @@
package net.minecraftforge.forge.tasks
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.OutputFile
import java.util.zip.ZipEntry
import java.util.zip.ZipInputStream
import java.util.zip.ZipOutputStream
import java.net.URL
public class CrowdinTask extends DefaultTask {
@Input String id = 'minecraft-forge'
@Input @Optional String key
@Input boolean json = true
@OutputFile output = project.file("build/${name}/output.zip")
@OutputFile export = project.file("build/${name}/export.json")
CrowdinTask() {
outputs.upToDateWhen{ false }
}
@TaskAction
def run() {
if (output.exists())
output.delete()
if (key == null)
return
// Force an export
new URL("https://api.crowdin.com/api/project/${id}/export?key=${key}").withInputStream { i ->
export.withOutputStream { it << i }
}
if (!export.text.contains('success')) {
throw new RuntimeException("Crowdin export failed, see ${export} for more info")
}
new URL("https://api.crowdin.com/api/project/${id}/download/all.zip?key=${key}").withInputStream { i ->
new ZipInputStream(i).withCloseable { zin ->
output.withOutputStream { out ->
new ZipOutputStream(out).withCloseable { zout ->
ZipEntry zein
while ((zein = zin.nextEntry) != null) {
if (zein.isDirectory()) {
zout.putNextEntry(new ZipEntry(zein.name))
} else {
// 1.13+ uses json
if (zein.name.endsWith('.json') == json) {
ZipEntry zeout = new ZipEntry(json ? zein.name.toLowerCase() : zein.name)
zeout.time = 1
zout.putNextEntry(zeout)
zout << zin
}
}
}
}
}
}
}
}
}


@@ -0,0 +1,43 @@
package net.minecraftforge.forge.tasks
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.OutputDirectory
import java.io.File
import java.util.HashSet
import java.util.Set
import java.net.URL
public class DownloadLibrariesTask extends DefaultTask {
@InputFile File input
@OutputDirectory File output = project.file("build/${name}/")
Set<File> libraries = new HashSet<>()
@TaskAction
def run() {
Util.init()
def json = input.json().libraries.each { lib ->
//TODO: Thread?
def artifacts = [lib.downloads.artifact] + lib.downloads.get('classifiers', [:]).values()
artifacts.each{ art ->
def target = new File(output, art.path)
libraries.add(target)
if (!target.exists() || !art.sha1.equals(target.sha1())) {
project.logger.lifecycle("Downloading ${art.url}")
if (!target.parentFile.exists()) {
target.parentFile.mkdirs()
}
new URL(art.url).withInputStream { i ->
target.withOutputStream { it << i }
}
if (!art.sha1.equals(target.sha1())) {
throw new IllegalStateException("Failed to download ${art.url} to ${target.canonicalPath} SHA Mismatch")
}
}
}
}
}
}
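For orientation, the rough shape of a version.json library entry this task iterates over, inferred from the fields it reads (downloads.artifact.path/url/sha1 plus the optional downloads.classifiers map); all values below are placeholders:
// Placeholder data only; not taken from a real version.json.
def sampleLibrary = [
    name: 'com.example:somelib:1.0',
    downloads: [
        artifact: [
            path: 'com/example/somelib/1.0/somelib-1.0.jar',
            url:  'https://libraries.minecraft.net/com/example/somelib/1.0/somelib-1.0.jar',
            sha1: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
            size: 0
        ],
        classifiers: [:] // optional; each value has the same path/url/sha1/size shape
    ]
]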


@@ -0,0 +1,117 @@
package net.minecraftforge.forge.tasks
import groovy.json.JsonSlurper
import java.io.File
import java.security.MessageDigest
import java.text.SimpleDateFormat
import java.util.Date
public class Util {
static void init() {
File.metaClass.sha1 = { ->
MessageDigest md = MessageDigest.getInstance('SHA-1')
delegate.eachByte 4096, {bytes, size ->
md.update(bytes, 0, size)
}
return md.digest().collect {String.format "%02x", it}.join()
}
File.metaClass.json = { -> new JsonSlurper().parseText(delegate.text) }
Date.metaClass.iso8601 = { ->
def format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")
def result = format.format(delegate)
return result[0..21] + ':' + result[22..-1]
}
}
public static String[] getClasspath(project, libs, artifact) {
def ret = []
artifactTree(project, artifact).each { key, lib ->
libs[lib.name] = lib
if (lib.name != artifact)
ret.add(lib.name)
}
return ret
}
public static def getArtifacts(project, config, classifiers) {
def ret = [:]
config.resolvedConfiguration.resolvedArtifacts.each {
def art = [
group: it.moduleVersion.id.group,
name: it.moduleVersion.id.name,
version: it.moduleVersion.id.version,
classifier: it.classifier,
extension: it.extension,
file: it.file
]
def key = art.group + ':' + art.name
def folder = "${art.group.replace('.', '/')}/${art.name}/${art.version}/"
def filename = "${art.name}-${art.version}"
if (art.classifier != null)
filename += "-${art.classifier}"
filename += ".${art.extension}"
def path = "${folder}${filename}"
def url = "https://libraries.minecraft.net/${path}"
if (!checkExists(url)) {
url = "https://files.minecraftforge.net/maven/${path}"
}
//TODO remove when Mojang launcher is updated
if (!classifiers && art.classifier != null) { //Mojang launcher doesn't currently support classifiers, so... move it to part of the version, and force the extension to 'jar'
art.version = "${art.version}-${art.classifier}"
art.classifier = null
art.extension = 'jar'
path = "${art.group.replace('.', '/')}/${art.name}/${art.version}/${art.name}-${art.version}.jar"
}
ret[key] = [
name: "${art.group}:${art.name}:${art.version}" + (art.classifier == null ? '' : ":${art.classifier}") + (art.extension == 'jar' ? '' : "@${art.extension}"),
downloads: [
artifact: [
path: path,
url: url,
sha1: sha1(art.file),
size: art.file.length()
]
]
]
}
return ret
}
public static def iso8601Now() { new Date().iso8601() }
public static def sha1(file) {
MessageDigest md = MessageDigest.getInstance('SHA-1')
file.eachByte 4096, {bytes, size ->
md.update(bytes, 0, size)
}
return md.digest().collect {String.format "%02x", it}.join()
}
private static def artifactTree(project, artifact) {
if (!project.ext.has('tree_resolver'))
project.ext.tree_resolver = 1
def cfg = project.configurations.create('tree_resolver_' + project.ext.tree_resolver++)
def dep = project.dependencies.create(artifact)
cfg.dependencies.add(dep)
def files = cfg.resolve()
return getArtifacts(project, cfg, true)
}
private static boolean checkExists(url) {
try {
def code = new URL(url).openConnection().with {
requestMethod = 'HEAD'
connect()
responseCode
}
return code == 200
} catch (Exception e) {
if (e.toString().contains('unable to find valid certification path to requested target'))
throw new RuntimeException('Failed to connect to ' + url + ': Missing certificate root authority, try updating java')
throw e
}
}
}
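To show what Util.init() provides, a short usage sketch as it would appear in a build script (the file paths are examples; assumes init() has already been called, as build.gradle does near the top):
// After Util.init(), File gains sha1()/json() and Date gains iso8601() via the metaclass.
Util.init()
def libraryHash = new File('build/libraries/example.jar').sha1() // hex SHA-1 digest string
def versionJson = new File('build/version.json').json()          // parsed with JsonSlurper
def timestamp   = Util.iso8601Now()                               // e.g. 2020-08-21T02:47:04-07:00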


@@ -9,14 +9,10 @@
func_238472_a_(p_230431_1_, fontrenderer, this.func_230458_i_(), this.field_230690_l_ + this.field_230688_j_ / 2, this.field_230691_m_ + (this.field_230689_k_ - 8) / 2, j | MathHelper.func_76123_f(this.field_230695_q_ * 255.0F) << 24);
}
@@ -206,6 +206,14 @@
@@ -206,6 +206,10 @@
this.field_230688_j_ = p_230991_1_;
}
+ public int getHeight() {
+ return this.field_230689_k_;
+ }
+
+ public void setHeight(int value) {
+ this.field_230689_k_ = value;
+ }
@@ -24,7 +20,7 @@
public void func_230986_a_(float p_230986_1_) {
this.field_230695_q_ = p_230986_1_;
}
@@ -233,4 +241,17 @@
@@ -233,4 +237,17 @@
protected void func_230996_d_(boolean p_230996_1_) {
this.field_230686_c_ = p_230996_1_;
}